diff --git a/experimental/python/_sources/databricks.bundles.core.rst.txt b/experimental/python/_sources/databricks.bundles.core.rst.txt
new file mode 100644
index 0000000000..f1a2636e8f
--- /dev/null
+++ b/experimental/python/_sources/databricks.bundles.core.rst.txt
@@ -0,0 +1,36 @@
+Core
+===============================
+
+.. currentmodule:: databricks.bundles.core
+
+**Package:** ``databricks.bundles.core``
+
+Classes
+-----------
+
+.. autoclass:: databricks.bundles.core.Resources
+.. autoclass:: databricks.bundles.core.Resource
+.. autoclass:: databricks.bundles.core.ResourceMutator
+.. autoclass:: databricks.bundles.core.Bundle
+.. autoclass:: databricks.bundles.core.Variable
+.. autoclass:: databricks.bundles.core.Diagnostics
+.. autoclass:: databricks.bundles.core.Diagnostic
+.. autoclass:: databricks.bundles.core.Location
+.. autoclass:: databricks.bundles.core.Severity
+
+.. class:: T
+
+ :class:`~typing.TypeVar` for the value type of :class:`Variable`
+
+Methods
+-----------
+
+.. automethod:: databricks.bundles.core.load_resources_from_current_package_module
+.. automethod:: databricks.bundles.core.load_resources_from_module
+.. automethod:: databricks.bundles.core.load_resources_from_modules
+.. automethod:: databricks.bundles.core.load_resources_from_package_module
+
+Decorators
+-----------
+.. autodecorator:: databricks.bundles.core.job_mutator
+.. autodecorator:: databricks.bundles.core.variables
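+
+Example
+-----------
+
+A minimal sketch of a job mutator, assuming ``Job`` is a dataclass with a
+``tags`` field; the tag name and value are illustrative and not part of the
+package:
+
+.. code-block:: python
+
+    from dataclasses import replace
+
+    from databricks.bundles.core import Bundle, job_mutator
+    from databricks.bundles.jobs import Job
+
+
+    @job_mutator
+    def add_default_tag(bundle: Bundle, job: Job) -> Job:
+        # Return a copy of the job with a default tag, leaving other fields unchanged.
+        tags = dict(job.tags or {})
+        tags.setdefault("team", "data-eng")
+        return replace(job, tags=tags)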
diff --git a/experimental/python/_sources/databricks.bundles.jobs.rst.txt b/experimental/python/_sources/databricks.bundles.jobs.rst.txt
new file mode 100644
index 0000000000..210e4d1c02
--- /dev/null
+++ b/experimental/python/_sources/databricks.bundles.jobs.rst.txt
@@ -0,0 +1,14 @@
+Jobs
+===============================
+
+.. currentmodule:: databricks.bundles.jobs
+
+**Package:** ``databricks.bundles.jobs``
+
+.. comment TODO
+.. comment .. autodecorator:: job_mutator
+
+Classes
+---------------
+
+.. include:: __generated__/databricks.bundles.jobs.rst
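+
+Example
+---------------
+
+A minimal, hypothetical sketch of declaring a job with the generated dataclasses
+(the job name, task key, and notebook path are placeholders):
+
+.. code-block:: python
+
+    from databricks.bundles.jobs import Job, NotebookTask, Task
+
+    my_job = Job(
+        name="my_job",
+        tasks=[
+            Task(
+                task_key="main",
+                notebook_task=NotebookTask(notebook_path="src/notebook.ipynb"),
+            ),
+        ],
+    )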
diff --git a/experimental/python/_sources/index.rst.txt b/experimental/python/_sources/index.rst.txt
new file mode 100644
index 0000000000..d10c6727d5
--- /dev/null
+++ b/experimental/python/_sources/index.rst.txt
@@ -0,0 +1,10 @@
+databricks-bundles (Beta)
+--------------------------------
+
+The ``databricks-bundles`` package implements Python support for Databricks Asset Bundles.
+
+.. toctree::
+ :maxdepth: 7
+
+ databricks.bundles.core
+ databricks.bundles.jobs
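+
+Resources are defined in Python and collected by an entry point referenced from
+the bundle configuration. A minimal sketch of such an entry point, using one of
+the loaders listed under :doc:`databricks.bundles.core` (the function name and
+loader choice are illustrative):
+
+.. code-block:: python
+
+    from databricks.bundles.core import (
+        Bundle,
+        Resources,
+        load_resources_from_current_package_module,
+    )
+
+
+    def load_resources(bundle: Bundle) -> Resources:
+        # Collect every resource defined in the current package module.
+        return load_resources_from_current_package_module()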
diff --git a/experimental/python/_static/alabaster.css b/experimental/python/_static/alabaster.css
new file mode 100644
index 0000000000..e8df33a1e6
--- /dev/null
+++ b/experimental/python/_static/alabaster.css
@@ -0,0 +1,664 @@
+/* -- page layout ----------------------------------------------------------- */
+
+body {
+ font-family: Georgia, serif;
+ font-size: 17px;
+ background-color: #fff;
+ color: #000;
+ margin: 0;
+ padding: 0;
+}
+
+
+div.document {
+ width: 940px;
+ margin: 30px auto 0 auto;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 220px;
+}
+
+div.sphinxsidebar {
+ width: 220px;
+ font-size: 14px;
+ line-height: 1.5;
+}
+
+hr {
+ border: 1px solid #B1B4B6;
+}
+
+div.body {
+ background-color: #fff;
+ color: #3E4349;
+ padding: 0 30px 0 30px;
+}
+
+div.body > .section {
+ text-align: left;
+}
+
+div.footer {
+ width: 940px;
+ margin: 20px auto 30px auto;
+ font-size: 14px;
+ color: #888;
+ text-align: right;
+}
+
+div.footer a {
+ color: #888;
+}
+
+p.caption {
+ font-family: inherit;
+ font-size: inherit;
+}
+
+
+
+div.sphinxsidebar {
+ max-height: 100%;
+ overflow-y: auto;
+}
+
+div.sphinxsidebar a {
+ color: #444;
+ text-decoration: none;
+ border-bottom: 1px dotted #999;
+}
+
+div.sphinxsidebar a:hover {
+ border-bottom: 1px solid #999;
+}
+
+div.sphinxsidebarwrapper {
+ padding: 18px 10px;
+}
+
+div.sphinxsidebarwrapper p.logo {
+ padding: 0;
+ margin: -10px 0 0 0px;
+ text-align: center;
+}
+
+div.sphinxsidebarwrapper h1.logo {
+ margin-top: -10px;
+ text-align: center;
+ margin-bottom: 5px;
+ text-align: center;
+}
+
+div.sphinxsidebarwrapper h1.logo-name {
+ margin-top: 0px;
+}
+
+div.sphinxsidebarwrapper p.blurb {
+ margin-top: 0;
+ font-style: normal;
+}
+
+div.sphinxsidebar h3,
+div.sphinxsidebar h4 {
+ font-family: Georgia, serif;
+ color: #444;
+ font-size: 24px;
+ font-weight: normal;
+ margin: 0 0 5px 0;
+ padding: 0;
+}
+
+div.sphinxsidebar h4 {
+ font-size: 20px;
+}
+
+div.sphinxsidebar h3 a {
+ color: #444;
+}
+
+div.sphinxsidebar p.logo a,
+div.sphinxsidebar h3 a,
+div.sphinxsidebar p.logo a:hover,
+div.sphinxsidebar h3 a:hover {
+ border: none;
+}
+
+div.sphinxsidebar p {
+ color: #555;
+ margin: 10px 0;
+}
+
+div.sphinxsidebar ul {
+ margin: 10px 0;
+ padding: 0;
+ color: #000;
+}
+
+div.sphinxsidebar ul li.toctree-l1 > a {
+ font-size: 120%;
+}
+
+div.sphinxsidebar ul li.toctree-l2 > a {
+ font-size: 110%;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #CCC;
+ font-family: Georgia, serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox {
+ margin: 1em 0;
+}
+
+div.sphinxsidebar .search > div {
+ display: table-cell;
+}
+
+div.sphinxsidebar hr {
+ border: none;
+ height: 1px;
+ color: #AAA;
+ background: #AAA;
+
+ text-align: left;
+ margin-left: 0;
+ width: 50%;
+}
+
+div.sphinxsidebar .badge {
+ border-bottom: none;
+}
+
+div.sphinxsidebar .badge:hover {
+ border-bottom: none;
+}
+
+/* To address an issue with donation coming after search */
+div.sphinxsidebar h3.donation {
+ margin-top: 10px;
+}
+
+/* -- body styles ----------------------------------------------------------- */
+
+a {
+ color: #004B6B;
+ text-decoration: underline;
+}
+
+a:hover {
+ color: #6D4100;
+ text-decoration: underline;
+}
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: Georgia, serif;
+ font-weight: normal;
+ margin: 30px 0px 10px 0px;
+ padding: 0;
+}
+
+div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; }
+div.body h2 { font-size: 180%; }
+div.body h3 { font-size: 150%; }
+div.body h4 { font-size: 130%; }
+div.body h5 { font-size: 100%; }
+div.body h6 { font-size: 100%; }
+
+a.headerlink {
+ color: #DDD;
+ padding: 0 4px;
+ text-decoration: none;
+}
+
+a.headerlink:hover {
+ color: #444;
+ background: #EAEAEA;
+}
+
+div.body p, div.body dd, div.body li {
+ line-height: 1.4em;
+}
+
+div.admonition {
+ margin: 20px 0px;
+ padding: 10px 30px;
+ background-color: #EEE;
+ border: 1px solid #CCC;
+}
+
+div.admonition tt.xref, div.admonition code.xref, div.admonition a tt {
+ background-color: #FBFBFB;
+ border-bottom: 1px solid #fafafa;
+}
+
+div.admonition p.admonition-title {
+ font-family: Georgia, serif;
+ font-weight: normal;
+ font-size: 24px;
+ margin: 0 0 10px 0;
+ padding: 0;
+ line-height: 1;
+}
+
+div.admonition p.last {
+ margin-bottom: 0;
+}
+
+dt:target, .highlight {
+ background: #FAF3E8;
+}
+
+div.warning {
+ background-color: #FCC;
+ border: 1px solid #FAA;
+}
+
+div.danger {
+ background-color: #FCC;
+ border: 1px solid #FAA;
+ -moz-box-shadow: 2px 2px 4px #D52C2C;
+ -webkit-box-shadow: 2px 2px 4px #D52C2C;
+ box-shadow: 2px 2px 4px #D52C2C;
+}
+
+div.error {
+ background-color: #FCC;
+ border: 1px solid #FAA;
+ -moz-box-shadow: 2px 2px 4px #D52C2C;
+ -webkit-box-shadow: 2px 2px 4px #D52C2C;
+ box-shadow: 2px 2px 4px #D52C2C;
+}
+
+div.caution {
+ background-color: #FCC;
+ border: 1px solid #FAA;
+}
+
+div.attention {
+ background-color: #FCC;
+ border: 1px solid #FAA;
+}
+
+div.important {
+ background-color: #EEE;
+ border: 1px solid #CCC;
+}
+
+div.note {
+ background-color: #EEE;
+ border: 1px solid #CCC;
+}
+
+div.tip {
+ background-color: #EEE;
+ border: 1px solid #CCC;
+}
+
+div.hint {
+ background-color: #EEE;
+ border: 1px solid #CCC;
+}
+
+div.seealso {
+ background-color: #EEE;
+ border: 1px solid #CCC;
+}
+
+div.topic {
+ background-color: #EEE;
+}
+
+p.admonition-title {
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+pre, tt, code {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+ font-size: 0.9em;
+}
+
+.hll {
+ background-color: #FFC;
+ margin: 0 -12px;
+ padding: 0 12px;
+ display: block;
+}
+
+img.screenshot {
+}
+
+tt.descname, tt.descclassname, code.descname, code.descclassname {
+ font-size: 0.95em;
+}
+
+tt.descname, code.descname {
+ padding-right: 0.08em;
+}
+
+img.screenshot {
+ -moz-box-shadow: 2px 2px 4px #EEE;
+ -webkit-box-shadow: 2px 2px 4px #EEE;
+ box-shadow: 2px 2px 4px #EEE;
+}
+
+table.docutils {
+ border: 1px solid #888;
+ -moz-box-shadow: 2px 2px 4px #EEE;
+ -webkit-box-shadow: 2px 2px 4px #EEE;
+ box-shadow: 2px 2px 4px #EEE;
+}
+
+table.docutils td, table.docutils th {
+ border: 1px solid #888;
+ padding: 0.25em 0.7em;
+}
+
+table.field-list, table.footnote {
+ border: none;
+ -moz-box-shadow: none;
+ -webkit-box-shadow: none;
+ box-shadow: none;
+}
+
+table.footnote {
+ margin: 15px 0;
+ width: 100%;
+ border: 1px solid #EEE;
+ background: #FDFDFD;
+ font-size: 0.9em;
+}
+
+table.footnote + table.footnote {
+ margin-top: -15px;
+ border-top: none;
+}
+
+table.field-list th {
+ padding: 0 0.8em 0 0;
+}
+
+table.field-list td {
+ padding: 0;
+}
+
+table.field-list p {
+ margin-bottom: 0.8em;
+}
+
+/* Cloned from
+ * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68
+ */
+.field-name {
+ -moz-hyphens: manual;
+ -ms-hyphens: manual;
+ -webkit-hyphens: manual;
+ hyphens: manual;
+}
+
+table.footnote td.label {
+ width: .1px;
+ padding: 0.3em 0 0.3em 0.5em;
+}
+
+table.footnote td {
+ padding: 0.3em 0.5em;
+}
+
+dl {
+ margin-left: 0;
+ margin-right: 0;
+ margin-top: 0;
+ padding: 0;
+}
+
+dl dd {
+ margin-left: 30px;
+}
+
+blockquote {
+ margin: 0 0 0 30px;
+ padding: 0;
+}
+
+ul, ol {
+ /* Matches the 30px from the narrow-screen "li > ul" selector below */
+ margin: 10px 0 10px 30px;
+ padding: 0;
+}
+
+pre {
+ background: unset;
+ padding: 7px 30px;
+ margin: 15px 0px;
+ line-height: 1.3em;
+}
+
+div.viewcode-block:target {
+ background: #ffd;
+}
+
+dl pre, blockquote pre, li pre {
+ margin-left: 0;
+ padding-left: 30px;
+}
+
+tt, code {
+ background-color: #ecf0f3;
+ color: #222;
+ /* padding: 1px 2px; */
+}
+
+tt.xref, code.xref, a tt {
+ background-color: #FBFBFB;
+ border-bottom: 1px solid #fff;
+}
+
+a.reference {
+ text-decoration: none;
+ border-bottom: 1px dotted #004B6B;
+}
+
+a.reference:hover {
+ border-bottom: 1px solid #6D4100;
+}
+
+/* Don't put an underline on images */
+a.image-reference, a.image-reference:hover {
+ border-bottom: none;
+}
+
+a.footnote-reference {
+ text-decoration: none;
+ font-size: 0.7em;
+ vertical-align: top;
+ border-bottom: 1px dotted #004B6B;
+}
+
+a.footnote-reference:hover {
+ border-bottom: 1px solid #6D4100;
+}
+
+a:hover tt, a:hover code {
+ background: #EEE;
+}
+div.sphinxsidebar {
+ position: fixed;
+ margin-left: 0;
+}
+
+@media screen and (max-width: 940px) {
+
+ body {
+ margin: 0;
+ padding: 20px 30px;
+ }
+
+ div.documentwrapper {
+ float: none;
+ background: #fff;
+ margin-left: 0;
+ margin-top: 0;
+ margin-right: 0;
+ margin-bottom: 0;
+ }
+
+ div.sphinxsidebar {
+ display: block;
+ float: none;
+ width: unset;
+ margin: -20px -30px 20px -30px;
+ position: static;
+ padding: 10px 20px;
+ background: #333;
+ color: #FFF;
+ }
+
+ div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p,
+ div.sphinxsidebar h3 a {
+ color: #fff;
+ }
+
+ div.sphinxsidebar a {
+ color: #AAA;
+ }
+
+ div.sphinxsidebar p.logo {
+ display: none;
+ }
+
+ div.document {
+ width: 100%;
+ margin: 0;
+ }
+
+ div.footer {
+ display: none;
+ }
+
+ div.bodywrapper {
+ margin: 0;
+ }
+
+ div.body {
+ min-height: 0;
+ min-width: auto; /* fixes width on small screens, breaks .hll */
+ padding: 0;
+ }
+
+ .hll {
+ /* "fixes" the breakage */
+ width: max-content;
+ }
+
+ .rtd_doc_footer {
+ display: none;
+ }
+
+ .document {
+ width: auto;
+ }
+
+ .footer {
+ width: auto;
+ }
+
+ .github {
+ display: none;
+ }
+
+ ul {
+ margin-left: 0;
+ }
+
+ li > ul {
+ /* Matches the 30px from the "ul, ol" selector above */
+ margin-left: 30px;
+ }
+}
+
+
+/* misc. */
+
+.revsys-inline {
+ display: none!important;
+}
+
+/* Hide ugly table cell borders in ..bibliography:: directive output */
+table.docutils.citation, table.docutils.citation td, table.docutils.citation th {
+ border: none;
+ /* Below needed in some edge cases; if not applied, bottom shadows appear */
+ -moz-box-shadow: none;
+ -webkit-box-shadow: none;
+ box-shadow: none;
+}
+
+
+/* relbar */
+
+.related {
+ line-height: 30px;
+ width: 100%;
+ font-size: 0.9rem;
+}
+
+.related.top {
+ border-bottom: 1px solid #EEE;
+ margin-bottom: 20px;
+}
+
+.related.bottom {
+ border-top: 1px solid #EEE;
+}
+
+.related ul {
+ padding: 0;
+ margin: 0;
+ list-style: none;
+}
+
+.related li {
+ display: inline;
+}
+
+nav#rellinks {
+ float: right;
+}
+
+nav#rellinks li+li:before {
+ content: "|";
+}
+
+nav#breadcrumbs li+li:before {
+ content: "\00BB";
+}
+
+/* Hide certain items when printing */
+@media print {
+ div.related {
+ display: none;
+ }
+}
+
+img.github {
+ position: absolute;
+ top: 0;
+ border: 0;
+ right: 0;
+}
\ No newline at end of file
diff --git a/experimental/python/_static/basic.css b/experimental/python/_static/basic.css
new file mode 100644
index 0000000000..e5179b7a9d
--- /dev/null
+++ b/experimental/python/_static/basic.css
@@ -0,0 +1,925 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+ clear: both;
+}
+
+div.section::after {
+ display: block;
+ content: '';
+ clear: left;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+ width: 100%;
+ font-size: 90%;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+ padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+ float: left;
+ width: 230px;
+ margin-left: -100%;
+ font-size: 90%;
+ word-wrap: break-word;
+ overflow-wrap : break-word;
+}
+
+div.sphinxsidebar ul {
+ list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+ margin-left: 20px;
+ list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+ border: 1px solid #98dbcc;
+ font-family: sans-serif;
+ font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox form.search {
+ overflow: hidden;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+ float: left;
+ width: 80%;
+ padding: 0.25em;
+ box-sizing: border-box;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+ float: left;
+ width: 20%;
+ border-left: none;
+ padding: 0.25em;
+ box-sizing: border-box;
+}
+
+
+img {
+ border: 0;
+ max-width: 100%;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li p.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+ width: 90%;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.3em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+ font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+ width: 100%;
+}
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable ul {
+ margin-top: 0;
+ margin-bottom: 0;
+ list-style-type: none;
+}
+
+table.indextable > tbody > tr > td > ul {
+ padding-left: 0em;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+div.modindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+ border-top: 1px solid #ddd;
+ border-bottom: 1px solid #ddd;
+ margin: 1em 0 1em 0;
+ padding: 0.4em;
+}
+
+/* -- domain module index --------------------------------------------------- */
+
+table.modindextable td {
+ padding: 2px;
+ border-collapse: collapse;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+div.body {
+ min-width: inherit;
+ max-width: 800px;
+}
+
+div.body p, div.body dd, div.body li, div.body blockquote {
+ -moz-hyphens: auto;
+ -ms-hyphens: auto;
+ -webkit-hyphens: auto;
+ hyphens: auto;
+}
+
+a.headerlink {
+ visibility: hidden;
+}
+
+a:visited {
+ color: #551A8B;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink,
+caption:hover > a.headerlink,
+p.caption:hover > a.headerlink,
+div.code-block-caption:hover > a.headerlink {
+ visibility: visible;
+}
+
+div.body p.caption {
+ text-align: inherit;
+}
+
+div.body td {
+ text-align: left;
+}
+
+.first {
+ margin-top: 0 !important;
+}
+
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+img.align-left, figure.align-left, .figure.align-left, object.align-left {
+ clear: left;
+ float: left;
+ margin-right: 1em;
+}
+
+img.align-right, figure.align-right, .figure.align-right, object.align-right {
+ clear: right;
+ float: right;
+ margin-left: 1em;
+}
+
+img.align-center, figure.align-center, .figure.align-center, object.align-center {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+img.align-default, figure.align-default, .figure.align-default {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+
+.align-left {
+ text-align: left;
+}
+
+.align-center {
+ text-align: center;
+}
+
+.align-default {
+ text-align: center;
+}
+
+.align-right {
+ text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar,
+aside.sidebar {
+ margin: 0 0 0.5em 1em;
+ border: 1px solid #ddb;
+ padding: 7px;
+ background-color: #ffe;
+ width: 40%;
+ float: right;
+ clear: right;
+ overflow-x: auto;
+}
+
+p.sidebar-title {
+ font-weight: bold;
+}
+
+nav.contents,
+aside.topic,
+div.admonition, div.topic, blockquote {
+ clear: left;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+nav.contents,
+aside.topic,
+div.topic {
+ border: 1px solid #ccc;
+ padding: 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+/* -- content of sidebars/topics/admonitions -------------------------------- */
+
+div.sidebar > :last-child,
+aside.sidebar > :last-child,
+nav.contents > :last-child,
+aside.topic > :last-child,
+div.topic > :last-child,
+div.admonition > :last-child {
+ margin-bottom: 0;
+}
+
+div.sidebar::after,
+aside.sidebar::after,
+nav.contents::after,
+aside.topic::after,
+div.topic::after,
+div.admonition::after,
+blockquote::after {
+ display: block;
+ content: '';
+ clear: both;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ border: 0;
+ border-collapse: collapse;
+}
+
+table.align-center {
+ margin-left: auto;
+ margin-right: auto;
+}
+
+table.align-default {
+ margin-left: auto;
+ margin-right: auto;
+}
+
+table caption span.caption-number {
+ font-style: italic;
+}
+
+table caption span.caption-text {
+}
+
+table.docutils td, table.docutils th {
+ padding: 1px 8px 1px 5px;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+table.citation {
+ border-left: solid 1px gray;
+ margin-left: 1px;
+}
+
+table.citation td {
+ border-bottom: none;
+}
+
+th > :first-child,
+td > :first-child {
+ margin-top: 0px;
+}
+
+th > :last-child,
+td > :last-child {
+ margin-bottom: 0px;
+}
+
+/* -- figures --------------------------------------------------------------- */
+
+div.figure, figure {
+ margin: 0.5em;
+ padding: 0.5em;
+}
+
+div.figure p.caption, figcaption {
+ padding: 0.3em;
+}
+
+div.figure p.caption span.caption-number,
+figcaption span.caption-number {
+ font-style: italic;
+}
+
+div.figure p.caption span.caption-text,
+figcaption span.caption-text {
+}
+
+/* -- field list styles ----------------------------------------------------- */
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+.field-list ul {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.field-list p {
+ margin: 0;
+}
+
+.field-name {
+ -moz-hyphens: manual;
+ -ms-hyphens: manual;
+ -webkit-hyphens: manual;
+ hyphens: manual;
+}
+
+/* -- hlist styles ---------------------------------------------------------- */
+
+table.hlist {
+ margin: 1em 0;
+}
+
+table.hlist td {
+ vertical-align: top;
+}
+
+/* -- object description styles --------------------------------------------- */
+
+.sig {
+ font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace;
+}
+
+.sig-name, code.descname {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+.sig-name {
+ font-size: 1.1em;
+}
+
+code.descname {
+ font-size: 1.2em;
+}
+
+.sig-prename, code.descclassname {
+ background-color: transparent;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.sig-paren {
+ font-size: larger;
+}
+
+.sig-param.n {
+ font-style: italic;
+}
+
+/* C++ specific styling */
+
+.sig-inline.c-texpr,
+.sig-inline.cpp-texpr {
+ font-family: unset;
+}
+
+.sig.c .k, .sig.c .kt,
+.sig.cpp .k, .sig.cpp .kt {
+ color: #0033B3;
+}
+
+.sig.c .m,
+.sig.cpp .m {
+ color: #1750EB;
+}
+
+.sig.c .s, .sig.c .sc,
+.sig.cpp .s, .sig.cpp .sc {
+ color: #067D17;
+}
+
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+:not(li) > ol > li:first-child > :first-child,
+:not(li) > ul > li:first-child > :first-child {
+ margin-top: 0px;
+}
+
+:not(li) > ol > li:last-child > :last-child,
+:not(li) > ul > li:last-child > :last-child {
+ margin-bottom: 0px;
+}
+
+ol.simple ol p,
+ol.simple ul p,
+ul.simple ol p,
+ul.simple ul p {
+ margin-top: 0;
+}
+
+ol.simple > li:not(:first-child) > p,
+ul.simple > li:not(:first-child) > p {
+ margin-top: 0;
+}
+
+ol.simple p,
+ul.simple p {
+ margin-bottom: 0;
+}
+
+aside.footnote > span,
+div.citation > span {
+ float: left;
+}
+aside.footnote > span:last-of-type,
+div.citation > span:last-of-type {
+ padding-right: 0.5em;
+}
+aside.footnote > p {
+ margin-left: 2em;
+}
+div.citation > p {
+ margin-left: 4em;
+}
+aside.footnote > p:last-of-type,
+div.citation > p:last-of-type {
+ margin-bottom: 0em;
+}
+aside.footnote > p:last-of-type:after,
+div.citation > p:last-of-type:after {
+ content: "";
+ clear: both;
+}
+
+dl.field-list {
+ display: grid;
+ grid-template-columns: fit-content(30%) auto;
+}
+
+dl.field-list > dt {
+ font-weight: bold;
+ word-break: break-word;
+ padding-left: 0.5em;
+ padding-right: 5px;
+}
+
+dl.field-list > dd {
+ padding-left: 0.5em;
+ margin-top: 0em;
+ margin-left: 0em;
+ margin-bottom: 0em;
+}
+
+dl {
+ margin-bottom: 15px;
+}
+
+dd > :first-child {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+.sig dd {
+ margin-top: 0px;
+ margin-bottom: 0px;
+}
+
+.sig dl {
+ margin-top: 0px;
+ margin-bottom: 0px;
+}
+
+dl > dd:last-child,
+dl > dd:last-child > :last-child {
+ margin-bottom: 0;
+}
+
+dt:target, span.highlighted {
+ background-color: #fbe54e;
+}
+
+rect.highlighted {
+ fill: #fbe54e;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+.system-message {
+ background-color: #fda;
+ padding: 5px;
+ border: 3px solid red;
+}
+
+.footnote:target {
+ background-color: #ffa;
+}
+
+.line-block {
+ display: block;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.line-block .line-block {
+ margin-top: 0;
+ margin-bottom: 0;
+ margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+ font-family: sans-serif;
+}
+
+.accelerator {
+ text-decoration: underline;
+}
+
+.classifier {
+ font-style: oblique;
+}
+
+.classifier:before {
+ font-style: normal;
+ margin: 0 0.5em;
+ content: ":";
+ display: inline-block;
+}
+
+abbr, acronym {
+ border-bottom: dotted 1px;
+ cursor: help;
+}
+
+.translated {
+ background-color: rgba(207, 255, 207, 0.2)
+}
+
+.untranslated {
+ background-color: rgba(255, 207, 207, 0.2)
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+ overflow: auto;
+ overflow-y: hidden; /* fixes display issues on Chrome browsers */
+}
+
+pre, div[class*="highlight-"] {
+ clear: both;
+}
+
+span.pre {
+ -moz-hyphens: none;
+ -ms-hyphens: none;
+ -webkit-hyphens: none;
+ hyphens: none;
+ white-space: nowrap;
+}
+
+div[class*="highlight-"] {
+ margin: 1em 0;
+}
+
+td.linenos pre {
+ border: 0;
+ background-color: transparent;
+ color: #aaa;
+}
+
+table.highlighttable {
+ display: block;
+}
+
+table.highlighttable tbody {
+ display: block;
+}
+
+table.highlighttable tr {
+ display: flex;
+}
+
+table.highlighttable td {
+ margin: 0;
+ padding: 0;
+}
+
+table.highlighttable td.linenos {
+ padding-right: 0.5em;
+}
+
+table.highlighttable td.code {
+ flex: 1;
+ overflow: hidden;
+}
+
+.highlight .hll {
+ display: block;
+}
+
+div.highlight pre,
+table.highlighttable pre {
+ margin: 0;
+}
+
+div.code-block-caption + div {
+ margin-top: 0;
+}
+
+div.code-block-caption {
+ margin-top: 1em;
+ padding: 2px 5px;
+ font-size: small;
+}
+
+div.code-block-caption code {
+ background-color: transparent;
+}
+
+table.highlighttable td.linenos,
+span.linenos,
+div.highlight span.gp { /* gp: Generic.Prompt */
+ user-select: none;
+ -webkit-user-select: text; /* Safari fallback only */
+ -webkit-user-select: none; /* Chrome/Safari */
+ -moz-user-select: none; /* Firefox */
+ -ms-user-select: none; /* IE10+ */
+}
+
+div.code-block-caption span.caption-number {
+ padding: 0.1em 0.3em;
+ font-style: italic;
+}
+
+div.code-block-caption span.caption-text {
+}
+
+div.literal-block-wrapper {
+ margin: 1em 0;
+}
+
+code.xref, a code {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
+ background-color: transparent;
+}
+
+.viewcode-link {
+ float: right;
+}
+
+.viewcode-back {
+ float: right;
+ font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+ margin: -1px -10px;
+ padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+ vertical-align: middle;
+}
+
+div.body div.math p {
+ text-align: center;
+}
+
+span.eqno {
+ float: right;
+}
+
+span.eqno a.headerlink {
+ position: absolute;
+ z-index: 1;
+}
+
+div.math:hover a.headerlink {
+ visibility: visible;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+ div.document,
+ div.documentwrapper,
+ div.bodywrapper {
+ margin: 0 !important;
+ width: 100%;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ #top-link {
+ display: none;
+ }
+}
\ No newline at end of file
diff --git a/experimental/python/_static/custom.css b/experimental/python/_static/custom.css
new file mode 100644
index 0000000000..2a924f1d6a
--- /dev/null
+++ b/experimental/python/_static/custom.css
@@ -0,0 +1 @@
+/* This file intentionally left blank. */
diff --git a/experimental/python/_static/databricks-logo.svg b/experimental/python/_static/databricks-logo.svg
new file mode 100644
index 0000000000..4e61421b77
--- /dev/null
+++ b/experimental/python/_static/databricks-logo.svg
@@ -0,0 +1,82 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/experimental/python/_static/doctools.js b/experimental/python/_static/doctools.js
new file mode 100644
index 0000000000..4d67807d17
--- /dev/null
+++ b/experimental/python/_static/doctools.js
@@ -0,0 +1,156 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Base JavaScript utilities for all Sphinx HTML documentation.
+ *
+ * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+"use strict";
+
+const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([
+ "TEXTAREA",
+ "INPUT",
+ "SELECT",
+ "BUTTON",
+]);
+
+const _ready = (callback) => {
+ if (document.readyState !== "loading") {
+ callback();
+ } else {
+ document.addEventListener("DOMContentLoaded", callback);
+ }
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+const Documentation = {
+ init: () => {
+ Documentation.initDomainIndexTable();
+ Documentation.initOnKeyListeners();
+ },
+
+ /**
+ * i18n support
+ */
+ TRANSLATIONS: {},
+ PLURAL_EXPR: (n) => (n === 1 ? 0 : 1),
+ LOCALE: "unknown",
+
+ // gettext and ngettext don't access this so that the functions
+ // can safely bound to a different name (_ = Documentation.gettext)
+ gettext: (string) => {
+ const translated = Documentation.TRANSLATIONS[string];
+ switch (typeof translated) {
+ case "undefined":
+ return string; // no translation
+ case "string":
+ return translated; // translation exists
+ default:
+ return translated[0]; // (singular, plural) translation tuple exists
+ }
+ },
+
+ ngettext: (singular, plural, n) => {
+ const translated = Documentation.TRANSLATIONS[singular];
+ if (typeof translated !== "undefined")
+ return translated[Documentation.PLURAL_EXPR(n)];
+ return n === 1 ? singular : plural;
+ },
+
+ addTranslations: (catalog) => {
+ Object.assign(Documentation.TRANSLATIONS, catalog.messages);
+ Documentation.PLURAL_EXPR = new Function(
+ "n",
+ `return (${catalog.plural_expr})`
+ );
+ Documentation.LOCALE = catalog.locale;
+ },
+
+ /**
+ * helper function to focus on search bar
+ */
+ focusSearchBar: () => {
+ document.querySelectorAll("input[name=q]")[0]?.focus();
+ },
+
+ /**
+ * Initialise the domain index toggle buttons
+ */
+ initDomainIndexTable: () => {
+ const toggler = (el) => {
+ const idNumber = el.id.substr(7);
+ const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`);
+ if (el.src.substr(-9) === "minus.png") {
+ el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`;
+ toggledRows.forEach((el) => (el.style.display = "none"));
+ } else {
+ el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`;
+ toggledRows.forEach((el) => (el.style.display = ""));
+ }
+ };
+
+ const togglerElements = document.querySelectorAll("img.toggler");
+ togglerElements.forEach((el) =>
+ el.addEventListener("click", (event) => toggler(event.currentTarget))
+ );
+ togglerElements.forEach((el) => (el.style.display = ""));
+ if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
+ },
+
+ initOnKeyListeners: () => {
+ // only install a listener if it is really needed
+ if (
+ !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
+ !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
+ )
+ return;
+
+ document.addEventListener("keydown", (event) => {
+ // bail for input elements
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+ // bail with special keys
+ if (event.altKey || event.ctrlKey || event.metaKey) return;
+
+ if (!event.shiftKey) {
+ switch (event.key) {
+ case "ArrowLeft":
+ if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
+
+ const prevLink = document.querySelector('link[rel="prev"]');
+ if (prevLink && prevLink.href) {
+ window.location.href = prevLink.href;
+ event.preventDefault();
+ }
+ break;
+ case "ArrowRight":
+ if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break;
+
+ const nextLink = document.querySelector('link[rel="next"]');
+ if (nextLink && nextLink.href) {
+ window.location.href = nextLink.href;
+ event.preventDefault();
+ }
+ break;
+ }
+ }
+
+ // some keyboard layouts may need Shift to get /
+ switch (event.key) {
+ case "/":
+ if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break;
+ Documentation.focusSearchBar();
+ event.preventDefault();
+ }
+ });
+ },
+};
+
+// quick alias for translations
+const _ = Documentation.gettext;
+
+_ready(Documentation.init);
diff --git a/experimental/python/_static/documentation_options.js b/experimental/python/_static/documentation_options.js
new file mode 100644
index 0000000000..e0b60b61d0
--- /dev/null
+++ b/experimental/python/_static/documentation_options.js
@@ -0,0 +1,13 @@
+const DOCUMENTATION_OPTIONS = {
+ VERSION: 'beta',
+ LANGUAGE: 'en',
+ COLLAPSE_INDEX: false,
+ BUILDER: 'html',
+ FILE_SUFFIX: '.html',
+ LINK_SUFFIX: '.html',
+ HAS_SOURCE: true,
+ SOURCELINK_SUFFIX: '.txt',
+ NAVIGATION_WITH_KEYS: false,
+ SHOW_SEARCH_SUMMARY: true,
+ ENABLE_SEARCH_SHORTCUTS: true,
+};
\ No newline at end of file
diff --git a/experimental/python/_static/file.png b/experimental/python/_static/file.png
new file mode 100644
index 0000000000..a858a410e4
Binary files /dev/null and b/experimental/python/_static/file.png differ
diff --git a/experimental/python/_static/github-banner.svg b/experimental/python/_static/github-banner.svg
new file mode 100644
index 0000000000..c47d9dc0cd
--- /dev/null
+++ b/experimental/python/_static/github-banner.svg
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/experimental/python/_static/language_data.js b/experimental/python/_static/language_data.js
new file mode 100644
index 0000000000..367b8ed81b
--- /dev/null
+++ b/experimental/python/_static/language_data.js
@@ -0,0 +1,199 @@
+/*
+ * language_data.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * This script contains the language-specific data used by searchtools.js,
+ * namely the list of stopwords, stemmer, scorer and splitter.
+ *
+ * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"];
+
+
+/* Non-minified version is copied as a separate JS file, if available */
+
+/**
+ * Porter Stemmer
+ */
+var Stemmer = function() {
+
+ var step2list = {
+ ational: 'ate',
+ tional: 'tion',
+ enci: 'ence',
+ anci: 'ance',
+ izer: 'ize',
+ bli: 'ble',
+ alli: 'al',
+ entli: 'ent',
+ eli: 'e',
+ ousli: 'ous',
+ ization: 'ize',
+ ation: 'ate',
+ ator: 'ate',
+ alism: 'al',
+ iveness: 'ive',
+ fulness: 'ful',
+ ousness: 'ous',
+ aliti: 'al',
+ iviti: 'ive',
+ biliti: 'ble',
+ logi: 'log'
+ };
+
+ var step3list = {
+ icate: 'ic',
+ ative: '',
+ alize: 'al',
+ iciti: 'ic',
+ ical: 'ic',
+ ful: '',
+ ness: ''
+ };
+
+ var c = "[^aeiou]"; // consonant
+ var v = "[aeiouy]"; // vowel
+ var C = c + "[^aeiouy]*"; // consonant sequence
+ var V = v + "[aeiou]*"; // vowel sequence
+
+ var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
+ var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
+ var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
+ var s_v = "^(" + C + ")?" + v; // vowel in stem
+
+ this.stemWord = function (w) {
+ var stem;
+ var suffix;
+ var firstch;
+ var origword = w;
+
+ if (w.length < 3)
+ return w;
+
+ var re;
+ var re2;
+ var re3;
+ var re4;
+
+ firstch = w.substr(0,1);
+ if (firstch == "y")
+ w = firstch.toUpperCase() + w.substr(1);
+
+ // Step 1a
+ re = /^(.+?)(ss|i)es$/;
+ re2 = /^(.+?)([^s])s$/;
+
+ if (re.test(w))
+ w = w.replace(re,"$1$2");
+ else if (re2.test(w))
+ w = w.replace(re2,"$1$2");
+
+ // Step 1b
+ re = /^(.+?)eed$/;
+ re2 = /^(.+?)(ed|ing)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ re = new RegExp(mgr0);
+ if (re.test(fp[1])) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1];
+ re2 = new RegExp(s_v);
+ if (re2.test(stem)) {
+ w = stem;
+ re2 = /(at|bl|iz)$/;
+ re3 = new RegExp("([^aeiouylsz])\\1$");
+ re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re2.test(w))
+ w = w + "e";
+ else if (re3.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+ else if (re4.test(w))
+ w = w + "e";
+ }
+ }
+
+ // Step 1c
+ re = /^(.+?)y$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(s_v);
+ if (re.test(stem))
+ w = stem + "i";
+ }
+
+ // Step 2
+ re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step2list[suffix];
+ }
+
+ // Step 3
+ re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ suffix = fp[2];
+ re = new RegExp(mgr0);
+ if (re.test(stem))
+ w = stem + step3list[suffix];
+ }
+
+ // Step 4
+ re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+ re2 = /^(.+?)(s|t)(ion)$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ if (re.test(stem))
+ w = stem;
+ }
+ else if (re2.test(w)) {
+ var fp = re2.exec(w);
+ stem = fp[1] + fp[2];
+ re2 = new RegExp(mgr1);
+ if (re2.test(stem))
+ w = stem;
+ }
+
+ // Step 5
+ re = /^(.+?)e$/;
+ if (re.test(w)) {
+ var fp = re.exec(w);
+ stem = fp[1];
+ re = new RegExp(mgr1);
+ re2 = new RegExp(meq1);
+ re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+ if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+ w = stem;
+ }
+ re = /ll$/;
+ re2 = new RegExp(mgr1);
+ if (re.test(w) && re2.test(w)) {
+ re = /.$/;
+ w = w.replace(re,"");
+ }
+
+ // and turn initial Y back to y
+ if (firstch == "y")
+ w = firstch.toLowerCase() + w.substr(1);
+ return w;
+ }
+}
+
diff --git a/experimental/python/_static/minus.png b/experimental/python/_static/minus.png
new file mode 100644
index 0000000000..d96755fdaf
Binary files /dev/null and b/experimental/python/_static/minus.png differ
diff --git a/experimental/python/_static/plus.png b/experimental/python/_static/plus.png
new file mode 100644
index 0000000000..7107cec93a
Binary files /dev/null and b/experimental/python/_static/plus.png differ
diff --git a/experimental/python/_static/pygments.css b/experimental/python/_static/pygments.css
new file mode 100644
index 0000000000..04a41742ed
--- /dev/null
+++ b/experimental/python/_static/pygments.css
@@ -0,0 +1,84 @@
+pre { line-height: 125%; }
+td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
+span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
+td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
+span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
+.highlight .hll { background-color: #ffffcc }
+.highlight { background: #f8f8f8; }
+.highlight .c { color: #8f5902; font-style: italic } /* Comment */
+.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */
+.highlight .g { color: #000000 } /* Generic */
+.highlight .k { color: #004461; font-weight: bold } /* Keyword */
+.highlight .l { color: #000000 } /* Literal */
+.highlight .n { color: #000000 } /* Name */
+.highlight .o { color: #582800 } /* Operator */
+.highlight .x { color: #000000 } /* Other */
+.highlight .p { color: #000000; font-weight: bold } /* Punctuation */
+.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */
+.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #8f5902 } /* Comment.Preproc */
+.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */
+.highlight .c1 { color: #8f5902; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */
+.highlight .gd { color: #a40000 } /* Generic.Deleted */
+.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */
+.highlight .ges { color: #000000 } /* Generic.EmphStrong */
+.highlight .gr { color: #ef2929 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #888888 } /* Generic.Output */
+.highlight .gp { color: #745334 } /* Generic.Prompt */
+.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */
+.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */
+.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */
+.highlight .ld { color: #000000 } /* Literal.Date */
+.highlight .m { color: #990000 } /* Literal.Number */
+.highlight .s { color: #4e9a06 } /* Literal.String */
+.highlight .na { color: #c4a000 } /* Name.Attribute */
+.highlight .nb { color: #004461 } /* Name.Builtin */
+.highlight .nc { color: #000000 } /* Name.Class */
+.highlight .no { color: #000000 } /* Name.Constant */
+.highlight .nd { color: #888888 } /* Name.Decorator */
+.highlight .ni { color: #ce5c00 } /* Name.Entity */
+.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */
+.highlight .nf { color: #000000 } /* Name.Function */
+.highlight .nl { color: #f57900 } /* Name.Label */
+.highlight .nn { color: #000000 } /* Name.Namespace */
+.highlight .nx { color: #000000 } /* Name.Other */
+.highlight .py { color: #000000 } /* Name.Property */
+.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #000000 } /* Name.Variable */
+.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */
+.highlight .pm { color: #000000; font-weight: bold } /* Punctuation.Marker */
+.highlight .w { color: #f8f8f8 } /* Text.Whitespace */
+.highlight .mb { color: #990000 } /* Literal.Number.Bin */
+.highlight .mf { color: #990000 } /* Literal.Number.Float */
+.highlight .mh { color: #990000 } /* Literal.Number.Hex */
+.highlight .mi { color: #990000 } /* Literal.Number.Integer */
+.highlight .mo { color: #990000 } /* Literal.Number.Oct */
+.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */
+.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */
+.highlight .sc { color: #4e9a06 } /* Literal.String.Char */
+.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */
+.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */
+.highlight .se { color: #4e9a06 } /* Literal.String.Escape */
+.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */
+.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */
+.highlight .sx { color: #4e9a06 } /* Literal.String.Other */
+.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */
+.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */
+.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */
+.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */
+.highlight .fm { color: #000000 } /* Name.Function.Magic */
+.highlight .vc { color: #000000 } /* Name.Variable.Class */
+.highlight .vg { color: #000000 } /* Name.Variable.Global */
+.highlight .vi { color: #000000 } /* Name.Variable.Instance */
+.highlight .vm { color: #000000 } /* Name.Variable.Magic */
+.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/experimental/python/_static/searchtools.js b/experimental/python/_static/searchtools.js
new file mode 100644
index 0000000000..b08d58c9b9
--- /dev/null
+++ b/experimental/python/_static/searchtools.js
@@ -0,0 +1,620 @@
+/*
+ * searchtools.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+"use strict";
+
+/**
+ * Simple result scoring code.
+ */
+if (typeof Scorer === "undefined") {
+ var Scorer = {
+ // Implement the following function to further tweak the score for each result
+ // The function takes a result array [docname, title, anchor, descr, score, filename]
+ // and returns the new score.
+ /*
+ score: result => {
+ const [docname, title, anchor, descr, score, filename] = result
+ return score
+ },
+ */
+
+ // query matches the full name of an object
+ objNameMatch: 11,
+ // or matches in the last dotted part of the object name
+ objPartialMatch: 6,
+ // Additive scores depending on the priority of the object
+ objPrio: {
+ 0: 15, // used to be importantResults
+ 1: 5, // used to be objectResults
+ 2: -5, // used to be unimportantResults
+ },
+ // Used when the priority is not in the mapping.
+ objPrioDefault: 0,
+
+ // query found in title
+ title: 15,
+ partialTitle: 7,
+ // query found in terms
+ term: 5,
+ partialTerm: 2,
+ };
+}
+
+const _removeChildren = (element) => {
+ while (element && element.lastChild) element.removeChild(element.lastChild);
+};
+
+/**
+ * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
+ */
+const _escapeRegExp = (string) =>
+ string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
+
+const _displayItem = (item, searchTerms, highlightTerms) => {
+ const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
+ const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
+ const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX;
+ const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY;
+ const contentRoot = document.documentElement.dataset.content_root;
+
+ const [docName, title, anchor, descr, score, _filename] = item;
+
+ let listItem = document.createElement("li");
+ let requestUrl;
+ let linkUrl;
+ if (docBuilder === "dirhtml") {
+ // dirhtml builder
+ let dirname = docName + "/";
+ if (dirname.match(/\/index\/$/))
+ dirname = dirname.substring(0, dirname.length - 6);
+ else if (dirname === "index/") dirname = "";
+ requestUrl = contentRoot + dirname;
+ linkUrl = requestUrl;
+ } else {
+ // normal html builders
+ requestUrl = contentRoot + docName + docFileSuffix;
+ linkUrl = docName + docLinkSuffix;
+ }
+ let linkEl = listItem.appendChild(document.createElement("a"));
+ linkEl.href = linkUrl + anchor;
+ linkEl.dataset.score = score;
+ linkEl.innerHTML = title;
+ if (descr) {
+ listItem.appendChild(document.createElement("span")).innerHTML =
+ " (" + descr + ")";
+ // highlight search terms in the description
+ if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
+ highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
+ }
+ else if (showSearchSummary)
+ fetch(requestUrl)
+ .then((responseData) => responseData.text())
+ .then((data) => {
+ if (data)
+ listItem.appendChild(
+ Search.makeSearchSummary(data, searchTerms, anchor)
+ );
+ // highlight search terms in the summary
+ if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
+ highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
+ });
+ Search.output.appendChild(listItem);
+};
+const _finishSearch = (resultCount) => {
+ Search.stopPulse();
+ Search.title.innerText = _("Search Results");
+ if (!resultCount)
+ Search.status.innerText = Documentation.gettext(
+ "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
+ );
+ else
+ Search.status.innerText = _(
+ "Search finished, found ${resultCount} page(s) matching the search query."
+ ).replace('${resultCount}', resultCount);
+};
+const _displayNextItem = (
+ results,
+ resultCount,
+ searchTerms,
+ highlightTerms,
+) => {
+ // results left, load the summary and display it
+ // this is intended to be dynamic (don't sub resultsCount)
+ if (results.length) {
+ _displayItem(results.pop(), searchTerms, highlightTerms);
+ setTimeout(
+ () => _displayNextItem(results, resultCount, searchTerms, highlightTerms),
+ 5
+ );
+ }
+ // search finished, update title and status message
+ else _finishSearch(resultCount);
+};
+// Helper function used by query() to order search results.
+// Each input is an array of [docname, title, anchor, descr, score, filename].
+// Order the results by score (in opposite order of appearance, since the
+// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically.
+const _orderResultsByScoreThenName = (a, b) => {
+ const leftScore = a[4];
+ const rightScore = b[4];
+ if (leftScore === rightScore) {
+ // same score: sort alphabetically
+ const leftTitle = a[1].toLowerCase();
+ const rightTitle = b[1].toLowerCase();
+ if (leftTitle === rightTitle) return 0;
+ return leftTitle > rightTitle ? -1 : 1; // inverted is intentional
+ }
+ return leftScore > rightScore ? 1 : -1;
+};
+
+/**
+ * Default splitQuery function. Can be overridden in ``sphinx.search`` with a
+ * custom function per language.
+ *
+ * The regular expression works by splitting the string on consecutive characters
+ * that are not Unicode letters, numbers, underscores, or emoji characters.
+ * This is the same as ``\W+`` in Python, preserving the surrogate pair area.
+ */
+if (typeof splitQuery === "undefined") {
+ var splitQuery = (query) => query
+ .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
+ .filter(term => term) // remove remaining empty strings
+}
+
+/**
+ * Search Module
+ */
+const Search = {
+ _index: null,
+ _queued_query: null,
+ _pulse_status: -1,
+
+ htmlToText: (htmlString, anchor) => {
+ const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
+ for (const removalQuery of [".headerlink", "script", "style"]) {
+ htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() });
+ }
+ if (anchor) {
+ const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`);
+ if (anchorContent) return anchorContent.textContent;
+
+ console.warn(
+ `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`
+ );
+ }
+
+ // if anchor not specified or not found, fall back to main content
+ const docContent = htmlElement.querySelector('[role="main"]');
+ if (docContent) return docContent.textContent;
+
+ console.warn(
+ "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template."
+ );
+ return "";
+ },
+
+ init: () => {
+ const query = new URLSearchParams(window.location.search).get("q");
+ document
+ .querySelectorAll('input[name="q"]')
+ .forEach((el) => (el.value = query));
+ if (query) Search.performSearch(query);
+ },
+
+ loadIndex: (url) =>
+ (document.body.appendChild(document.createElement("script")).src = url),
+
+ setIndex: (index) => {
+ Search._index = index;
+ if (Search._queued_query !== null) {
+ const query = Search._queued_query;
+ Search._queued_query = null;
+ Search.query(query);
+ }
+ },
+
+ hasIndex: () => Search._index !== null,
+
+ deferQuery: (query) => (Search._queued_query = query),
+
+ stopPulse: () => (Search._pulse_status = -1),
+
+ startPulse: () => {
+ if (Search._pulse_status >= 0) return;
+
+ const pulse = () => {
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ Search.dots.innerText = ".".repeat(Search._pulse_status);
+ if (Search._pulse_status >= 0) window.setTimeout(pulse, 500);
+ };
+ pulse();
+ },
+
+ /**
+ * perform a search for something (or wait until index is loaded)
+ */
+ performSearch: (query) => {
+ // create the required interface elements
+ const searchText = document.createElement("h2");
+ searchText.textContent = _("Searching");
+ const searchSummary = document.createElement("p");
+ searchSummary.classList.add("search-summary");
+ searchSummary.innerText = "";
+ const searchList = document.createElement("ul");
+ searchList.classList.add("search");
+
+ const out = document.getElementById("search-results");
+ Search.title = out.appendChild(searchText);
+ Search.dots = Search.title.appendChild(document.createElement("span"));
+ Search.status = out.appendChild(searchSummary);
+ Search.output = out.appendChild(searchList);
+
+ const searchProgress = document.getElementById("search-progress");
+ // Some themes don't use the search progress node
+ if (searchProgress) {
+ searchProgress.innerText = _("Preparing search...");
+ }
+ Search.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (Search.hasIndex()) Search.query(query);
+ else Search.deferQuery(query);
+ },
+
+ _parseQuery: (query) => {
+ // stem the search terms and add them to the correct list
+ const stemmer = new Stemmer();
+ const searchTerms = new Set();
+ const excludedTerms = new Set();
+ const highlightTerms = new Set();
+ const objectTerms = new Set(splitQuery(query.toLowerCase().trim()));
+ splitQuery(query.trim()).forEach((queryTerm) => {
+ const queryTermLower = queryTerm.toLowerCase();
+
+ // maybe skip this "word"
+ // stopwords array is from language_data.js
+ if (
+ stopwords.indexOf(queryTermLower) !== -1 ||
+ queryTerm.match(/^\d+$/)
+ )
+ return;
+
+ // stem the word
+ let word = stemmer.stemWord(queryTermLower);
+ // select the correct list
+ if (word[0] === "-") excludedTerms.add(word.substr(1));
+ else {
+ searchTerms.add(word);
+ highlightTerms.add(queryTermLower);
+ }
+ });
+
+ if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
+ localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
+ }
+
+ // console.debug("SEARCH: searching for:");
+ // console.info("required: ", [...searchTerms]);
+ // console.info("excluded: ", [...excludedTerms]);
+
+ return [query, searchTerms, excludedTerms, highlightTerms, objectTerms];
+ },
+
+ /**
+ * execute search (requires search index to be loaded)
+ */
+ _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => {
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const titles = Search._index.titles;
+ const allTitles = Search._index.alltitles;
+ const indexEntries = Search._index.indexentries;
+
+ // Collect multiple result groups to be sorted separately and then ordered.
+ // Each is an array of [docname, title, anchor, descr, score, filename].
+ const normalResults = [];
+ const nonMainIndexResults = [];
+
+ _removeChildren(document.getElementById("search-progress"));
+
+ const queryLower = query.toLowerCase().trim();
+ for (const [title, foundTitles] of Object.entries(allTitles)) {
+ if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) {
+ for (const [file, id] of foundTitles) {
+ const score = Math.round(Scorer.title * queryLower.length / title.length);
+ const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
+ normalResults.push([
+ docNames[file],
+ titles[file] !== title ? `${titles[file]} > ${title}` : title,
+ id !== null ? "#" + id : "",
+ null,
+ score + boost,
+ filenames[file],
+ ]);
+ }
+ }
+ }
+
+ // search for explicit entries in index directives
+ for (const [entry, foundEntries] of Object.entries(indexEntries)) {
+ if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
+ for (const [file, id, isMain] of foundEntries) {
+ const score = Math.round(100 * queryLower.length / entry.length);
+ const result = [
+ docNames[file],
+ titles[file],
+ id ? "#" + id : "",
+ null,
+ score,
+ filenames[file],
+ ];
+ if (isMain) {
+ normalResults.push(result);
+ } else {
+ nonMainIndexResults.push(result);
+ }
+ }
+ }
+ }
+
+ // lookup as object
+ objectTerms.forEach((term) =>
+ normalResults.push(...Search.performObjectSearch(term, objectTerms))
+ );
+
+ // lookup as search terms in fulltext
+ normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms));
+
+ // let the scorer override scores with a custom scoring function
+ if (Scorer.score) {
+ normalResults.forEach((item) => (item[4] = Scorer.score(item)));
+ nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item)));
+ }
+
+ // Sort each group of results by score and then alphabetically by name.
+ normalResults.sort(_orderResultsByScoreThenName);
+ nonMainIndexResults.sort(_orderResultsByScoreThenName);
+
+ // Combine the result groups in (reverse) order.
+ // Non-main index entries are typically arbitrary cross-references,
+ // so display them after other results.
+ let results = [...nonMainIndexResults, ...normalResults];
+
+ // remove duplicate search results
+ // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
+ let seen = new Set();
+ results = results.reverse().reduce((acc, result) => {
+ let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
+ if (!seen.has(resultStr)) {
+ acc.push(result);
+ seen.add(resultStr);
+ }
+ return acc;
+ }, []);
+
+ return results.reverse();
+ },
+
+ query: (query) => {
+ const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query);
+ const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms);
+
+ // for debugging
+ //Search.lastresults = results.slice(); // a copy
+ // console.info("search results:", Search.lastresults);
+
+ // print the results
+ _displayNextItem(results, results.length, searchTerms, highlightTerms);
+ },
+
+ /**
+ * search for object names
+ */
+ performObjectSearch: (object, objectTerms) => {
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const objects = Search._index.objects;
+ const objNames = Search._index.objnames;
+ const titles = Search._index.titles;
+
+ const results = [];
+
+ const objectSearchCallback = (prefix, match) => {
+ const name = match[4]
+ const fullname = (prefix ? prefix + "." : "") + name;
+ const fullnameLower = fullname.toLowerCase();
+ if (fullnameLower.indexOf(object) < 0) return;
+
+ let score = 0;
+ const parts = fullnameLower.split(".");
+
+ // check for different match types: exact matches of full name or
+ // "last name" (i.e. last dotted part)
+ if (fullnameLower === object || parts.slice(-1)[0] === object)
+ score += Scorer.objNameMatch;
+ else if (parts.slice(-1)[0].indexOf(object) > -1)
+ score += Scorer.objPartialMatch; // matches in last name
+
+ const objName = objNames[match[1]][2];
+ const title = titles[match[0]];
+
+ // If more than one term searched for, we require other words to be
+ // found in the name/title/description
+ const otherTerms = new Set(objectTerms);
+ otherTerms.delete(object);
+ if (otherTerms.size > 0) {
+ const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase();
+ if (
+ [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0)
+ )
+ return;
+ }
+
+ let anchor = match[3];
+ if (anchor === "") anchor = fullname;
+ else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname;
+
+ const descr = objName + _(", in ") + title;
+
+ // add custom score for some objects according to scorer
+ if (Scorer.objPrio.hasOwnProperty(match[2]))
+ score += Scorer.objPrio[match[2]];
+ else score += Scorer.objPrioDefault;
+
+ results.push([
+ docNames[match[0]],
+ fullname,
+ "#" + anchor,
+ descr,
+ score,
+ filenames[match[0]],
+ ]);
+ };
+ Object.keys(objects).forEach((prefix) =>
+ objects[prefix].forEach((array) =>
+ objectSearchCallback(prefix, array)
+ )
+ );
+ return results;
+ },
+
+ /**
+ * search for full-text terms in the index
+ */
+ performTermsSearch: (searchTerms, excludedTerms) => {
+ // prepare search
+ const terms = Search._index.terms;
+ const titleTerms = Search._index.titleterms;
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const titles = Search._index.titles;
+
+ const scoreMap = new Map();
+ const fileMap = new Map();
+
+ // perform the search on the required terms
+ searchTerms.forEach((word) => {
+ const files = [];
+ const arr = [
+ { files: terms[word], score: Scorer.term },
+ { files: titleTerms[word], score: Scorer.title },
+ ];
+ // add support for partial matches
+ if (word.length > 2) {
+ const escapedWord = _escapeRegExp(word);
+ if (!terms.hasOwnProperty(word)) {
+ Object.keys(terms).forEach((term) => {
+ if (term.match(escapedWord))
+ arr.push({ files: terms[term], score: Scorer.partialTerm });
+ });
+ }
+ if (!titleTerms.hasOwnProperty(word)) {
+ Object.keys(titleTerms).forEach((term) => {
+ if (term.match(escapedWord))
+ arr.push({ files: titleTerms[term], score: Scorer.partialTitle });
+ });
+ }
+ }
+
+ // no match but word was a required one
+ if (arr.every((record) => record.files === undefined)) return;
+
+ // found search word in contents
+ arr.forEach((record) => {
+ if (record.files === undefined) return;
+
+ let recordFiles = record.files;
+ if (recordFiles.length === undefined) recordFiles = [recordFiles];
+ files.push(...recordFiles);
+
+ // set score for the word in each file
+ recordFiles.forEach((file) => {
+ if (!scoreMap.has(file)) scoreMap.set(file, {});
+ scoreMap.get(file)[word] = record.score;
+ });
+ });
+
+ // create the mapping
+ files.forEach((file) => {
+ if (!fileMap.has(file)) fileMap.set(file, [word]);
+ else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word);
+ });
+ });
+
+ // now check if the files don't contain excluded terms
+ const results = [];
+ for (const [file, wordList] of fileMap) {
+ // check if all requirements are matched
+
+ // as search terms with length < 3 are discarded
+ const filteredTermCount = [...searchTerms].filter(
+ (term) => term.length > 2
+ ).length;
+ if (
+ wordList.length !== searchTerms.size &&
+ wordList.length !== filteredTermCount
+ )
+ continue;
+
+ // ensure that none of the excluded terms is in the search result
+ if (
+ [...excludedTerms].some(
+ (term) =>
+ terms[term] === file ||
+ titleTerms[term] === file ||
+ (terms[term] || []).includes(file) ||
+ (titleTerms[term] || []).includes(file)
+ )
+ )
+ break;
+
+ // select one (max) score for the file.
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w]));
+ // add result to the result list
+ results.push([
+ docNames[file],
+ titles[file],
+ "",
+ null,
+ score,
+ filenames[file],
+ ]);
+ }
+ return results;
+ },
+
+ /**
+ * helper function to return a node containing the
+ * search summary for a given text. keywords is a list
+ * of stemmed words.
+ */
+ makeSearchSummary: (htmlText, keywords, anchor) => {
+ const text = Search.htmlToText(htmlText, anchor);
+ if (text === "") return null;
+
+ const textLower = text.toLowerCase();
+ const actualStartPosition = [...keywords]
+ .map((k) => textLower.indexOf(k.toLowerCase()))
+ .filter((i) => i > -1)
+ .slice(-1)[0];
+ const startWithContext = Math.max(actualStartPosition - 120, 0);
+
+ const top = startWithContext === 0 ? "" : "...";
+ const tail = startWithContext + 240 < text.length ? "..." : "";
+
+ let summary = document.createElement("p");
+ summary.classList.add("context");
+ summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
+
+ return summary;
+ },
+};
+
+_ready(Search.init);
diff --git a/experimental/python/_static/sphinx_highlight.js b/experimental/python/_static/sphinx_highlight.js
new file mode 100644
index 0000000000..8a96c69a19
--- /dev/null
+++ b/experimental/python/_static/sphinx_highlight.js
@@ -0,0 +1,154 @@
+/* Highlighting utilities for Sphinx HTML documentation. */
+"use strict";
+
+const SPHINX_HIGHLIGHT_ENABLED = true
+
+/**
+ * highlight a given string on a node by wrapping it in
+ * span elements with the given class name.
+ */
+const _highlight = (node, addItems, text, className) => {
+ if (node.nodeType === Node.TEXT_NODE) {
+ const val = node.nodeValue;
+ const parent = node.parentNode;
+ const pos = val.toLowerCase().indexOf(text);
+ if (
+ pos >= 0 &&
+ !parent.classList.contains(className) &&
+ !parent.classList.contains("nohighlight")
+ ) {
+ let span;
+
+ const closestNode = parent.closest("body, svg, foreignObject");
+ const isInSVG = closestNode && closestNode.matches("svg");
+ if (isInSVG) {
+ span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
+ } else {
+ span = document.createElement("span");
+ span.classList.add(className);
+ }
+
+ span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+ const rest = document.createTextNode(val.substr(pos + text.length));
+ parent.insertBefore(
+ span,
+ parent.insertBefore(
+ rest,
+ node.nextSibling
+ )
+ );
+ node.nodeValue = val.substr(0, pos);
+ /* There may be more occurrences of search term in this node. So call this
+ * function recursively on the remaining fragment.
+ */
+ _highlight(rest, addItems, text, className);
+
+ if (isInSVG) {
+ const rect = document.createElementNS(
+ "http://www.w3.org/2000/svg",
+ "rect"
+ );
+ const bbox = parent.getBBox();
+ rect.x.baseVal.value = bbox.x;
+ rect.y.baseVal.value = bbox.y;
+ rect.width.baseVal.value = bbox.width;
+ rect.height.baseVal.value = bbox.height;
+ rect.setAttribute("class", className);
+ addItems.push({ parent: parent, target: rect });
+ }
+ }
+ } else if (node.matches && !node.matches("button, select, textarea")) {
+ node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
+ }
+};
+const _highlightText = (thisNode, text, className) => {
+ let addItems = [];
+ _highlight(thisNode, addItems, text, className);
+ addItems.forEach((obj) =>
+ obj.parent.insertAdjacentElement("beforebegin", obj.target)
+ );
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+const SphinxHighlight = {
+
+ /**
+ * highlight the search words provided in localstorage in the text
+ */
+ highlightSearchWords: () => {
+ if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
+
+ // get and clear terms from localstorage
+ const url = new URL(window.location);
+ const highlight =
+ localStorage.getItem("sphinx_highlight_terms")
+ || url.searchParams.get("highlight")
+ || "";
+ localStorage.removeItem("sphinx_highlight_terms")
+ url.searchParams.delete("highlight");
+ window.history.replaceState({}, "", url);
+
+ // get individual terms from highlight string
+ const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
+ if (terms.length === 0) return; // nothing to do
+
+ // There should never be more than one element matching "div.body"
+ const divBody = document.querySelectorAll("div.body");
+ const body = divBody.length ? divBody[0] : document.querySelector("body");
+ window.setTimeout(() => {
+ terms.forEach((term) => _highlightText(body, term, "highlighted"));
+ }, 10);
+
+ const searchBox = document.getElementById("searchbox");
+ if (searchBox === null) return;
+ searchBox.appendChild(
+ document
+ .createRange()
+ .createContextualFragment(
+          '<p class="highlight-link">' +
+            '<a href="javascript:SphinxHighlight.hideSearchWords()">' +
+            _("Hide Search Matches") +
+            "</a></p>"
+ )
+ );
+ },
+
+ /**
+ * helper function to hide the search marks again
+ */
+ hideSearchWords: () => {
+ document
+ .querySelectorAll("#searchbox .highlight-link")
+ .forEach((el) => el.remove());
+ document
+ .querySelectorAll("span.highlighted")
+ .forEach((el) => el.classList.remove("highlighted"));
+ localStorage.removeItem("sphinx_highlight_terms")
+ },
+
+ initEscapeListener: () => {
+ // only install a listener if it is really needed
+ if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;
+
+ document.addEventListener("keydown", (event) => {
+ // bail for input elements
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+ // bail with special keys
+ if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
+ if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
+ SphinxHighlight.hideSearchWords();
+ event.preventDefault();
+ }
+ });
+ },
+};
+
+_ready(() => {
+ /* Do not call highlightSearchWords() when we are on the search page.
+ * It will highlight words from the *previous* search query.
+ */
+ if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords();
+ SphinxHighlight.initEscapeListener();
+});
diff --git a/experimental/python/databricks.bundles.core.html b/experimental/python/databricks.bundles.core.html
new file mode 100644
index 0000000000..255f655095
--- /dev/null
+++ b/experimental/python/databricks.bundles.core.html
@@ -0,0 +1,778 @@
+Core — databricks-bundles beta documentation
+
+Core
+Package: databricks.bundles.core
+
+Classes
+
+
+class Resources
+Resources is a collection of resources in a bundle.
+The Resources class is returned by the ‘load_resources’ function specified in databricks.yml. Each element in
+the ‘python/resources’ list is a fully qualified function name that returns an instance of the Resources class.
+If multiple functions are specified in the ‘python/resources’ list, the resources from all functions
+are combined into a single Resources object.
+Example:
+experimental :
+ python :
+ resources :
+ - "resources:load_resources"
+
+
+The load_resources function can be implemented using the built-in functions listed under Methods below, such as load_resources_from_modules or load_resources_from_current_package_module.
+
+Programmatic construction of resources is supported using the add_resource() and add_job() methods.
+Example:
+def load_resources ( bundle : Bundle ) -> Resources :
+ resources = Resources ()
+
+ for resource_name , config in get_configs ():
+ job = create_job ( config )
+
+ resources . add_job ( resource_name , job )
+
+ return resources
+
+
+
+
+property jobs : dict [ str , Job ]
+
+
+
+
+property diagnostics : Diagnostics
+Returns diagnostics. If there are any diagnostic errors, bundle validation fails.
+
+
+
+
+add_resource (
+
+
+resource_name : str ,
+resource : Resource ,
+* ,
+location : Location | None ,
+
+
+) → None
+Adds a resource to the collection of resources. Resource name must be unique across all
+resources of the same type.
+
+Parameters:
+
+resource_name – unique identifier for the resource
+resource – the resource to add
+location – optional location of the resource in the source code
+
+
+
+
+
+
+
+add_job (
+
+
+resource_name : str ,
+job : JobParam ,
+* ,
+location : Location | None ,
+
+
+) → None
+Adds a job to the collection of resources. Resource name must be unique across all jobs.
+
+Parameters:
+
+resource_name – unique identifier for the job
+job – the job to add, can be Job or dict
+location – optional location of the job in the source code
+
+
+
+
+
+
+
+add_location (
+
+
+path : tuple [ str , ... ] ,
+location : Location ,
+
+
+) → None
+Associate source code location with a path in the bundle configuration.
+
+
+
+
+add_diagnostics (
+
+
+other : Diagnostics ,
+
+
+) → None
+Add diagnostics from another Diagnostics object.
+
+
+
+
+add_diagnostic_error (
+
+
+msg : str ,
+* ,
+detail : str | None ,
+path : tuple [ str , ... ] | None ,
+location : Location | None ,
+
+
+) → None
+Report a diagnostic error. If there are any diagnostic errors, bundle validation fails.
+
+Parameters:
+
+msg – short summary of the error
+detail – optional detailed description of the error
+path – optional path in bundle configuration where the error occurred
+location – optional location in the source code where the error occurred
+
+
+
+
+
+
+
+add_diagnostic_warning (
+
+
+msg : str ,
+* ,
+detail : str | None ,
+path : tuple [ str , ... ] | None ,
+location : Location | None ,
+
+
+) → None
+Report a diagnostic warning. Warnings are informational and do not cause bundle validation to fail.
+
+Parameters:
+
+msg – short summary of the warning
+detail – optional detailed description of the warning
+path – optional path in bundle configuration where the warning occurred
+location – optional location in the source code where the warning occurred
+
+
+
+
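+For illustration, a minimal sketch of reporting diagnostics from a load_resources function. The get_configs and create_job helpers are hypothetical, and the optional keyword-only arguments (detail, path, location) are assumed to default to None:
+
+from databricks.bundles.core import Bundle, Resources
+
+def load_resources(bundle: Bundle) -> Resources:
+    resources = Resources()
+
+    for resource_name, config in get_configs():  # hypothetical helper
+        if "name" not in config:
+            # any reported error makes bundle validation fail
+            resources.add_diagnostic_error(
+                msg=f"Job config '{resource_name}' is missing a 'name' field",
+                detail="Every job config must define a 'name'.",
+            )
+            continue
+
+        resources.add_job(resource_name, create_job(config))  # hypothetical helper
+
+    return resources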
+
+
+
+add_resources (
+
+
+other : Resources ,
+
+
+) → None
+Add resources from another Resources object.
+Adds an error to diagnostics if there are duplicate resource names.
+
+
+
+
+
+
+class Resource
+Base class for all resources.
+
+
+
+
+class ResourceMutator
+Mutators defined within a single Python module are applied in the order they are defined.
+The relative order of mutators defined in different modules is not guaranteed.
+See databricks.bundles.core.job_mutator().
+
+
+resource_type : Type [ _T ]
+Resource type that this mutator applies to.
+
+
+
+
+function : Callable
+Underlying function that was decorated. Can be accessed for unit testing.
+
+
+
+
+
+
+class Bundle
+Bundle contains information about a bundle accessible in functions
+loading and mutating resources.
+
+
+target : str
+Selected target where the bundle is being loaded. E.g.: ‘development’, ‘staging’, or ‘production’.
+
+
+
+
+variables : dict [ str , Any ]
+Values of bundle variables resolved for the selected target. Bundle variables are defined in databricks.yml.
+For accessing variables as structured data, use resolve_variable().
+Example:
+variables :
+ default_dbr_version :
+ description : Default version of Databricks Runtime
+ default : "14.3.x-scala2.12"
+
+
+
+
+
+
+resolve_variable (
+
+
+variable : Variable [ _T ] | _T ,
+
+
+) → _T
+Resolve a variable to its value.
+If the value is a variable, it will be resolved and returned.
+Otherwise, the value will be returned as is.
+
+
+
+
+resolve_variable_list (
+
+
+variable : Variable [ list [ Variable [ _T ] | _T ] ] | list [ Variable [ _T ] | _T ] ,
+
+
+) → list [ _T ]
+Resolve a list variable to its value.
+If the value is a variable, or the list item is a variable, it will be resolved and returned.
+Otherwise, the value will be returned as is.
+
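+As a sketch of how values are resolved (this assumes a default_dbr_version variable is declared in databricks.yml as in the example above; the @variables decorator and Variable class are described further below):
+
+from databricks.bundles.core import Bundle, Resources, Variable, variables
+
+@variables
+class MyVariables:
+    default_dbr_version: Variable[str]  # ${var.default_dbr_version}
+
+def load_resources(bundle: Bundle) -> Resources:
+    # a Variable is resolved to its value for the selected target
+    dbr_version = bundle.resolve_variable(MyVariables.default_dbr_version)
+
+    # a plain value is returned as is
+    assert bundle.resolve_variable("14.3.x-scala2.12") == "14.3.x-scala2.12"
+
+    return Resources()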
+
+
+
+
+
+class Variable
+Reference to a bundle variable.
+See: Databricks Asset Bundles configuration
+
+
+path : str
+Path to the variable, e.g. “var.my_variable”.
+
+
+
+
+type : Type [ _T ]
+Type of the variable.
+
+
+
+
+property value : str
+Returns the variable path in the format of “${path}”
+
+
+
+
+
+
+class Diagnostics
+Diagnostics is a collection of errors and warnings we print to users.
+Each item can have a source location or path associated with it, which is reported in the output to
+indicate where the error or warning occurred.
+
+
+items : tuple [ Diagnostic , ... ]
+
+
+
+
+extend (
+
+
+diagnostics : Self ,
+
+
+) → Self
+Extend items with another Diagnostics object. This pattern makes it possible
+to accumulate errors and warnings.
+Example:
+def foo () -> Diagnostics : ...
+def bar () -> Diagnostics : ...
+
+diagnostics = Diagnostics ()
+diagnostics = diagnostics . extend ( foo ())
+diagnostics = diagnostics . extend ( bar ())
+
+
+
+
+
+
+extend_tuple (
+
+
+pair : tuple [ _T , Self ] ,
+
+
+) → tuple [ _T , Self ]
+Extend items with another Diagnostics object. This variant is useful when
+methods return a pair of a value and diagnostics. This pattern makes it possible
+to accumulate errors and warnings.
+Example:
+def foo () -> tuple [ int , Diagnostics ]: ...
+
+diagnostics = Diagnostics ()
+value , diagnostics = diagnostics . extend_tuple ( foo ())
+
+
+
+
+
+
+has_error ( ) → bool
+Returns True if there is at least one error in diagnostics.
+
+
+
+
+classmethod create_error (
+
+
+msg : str ,
+* ,
+detail : str | None ,
+location : Location | None ,
+path : tuple [ str , ... ] | None ,
+
+
+) → Self
+Create a Diagnostics object containing an error.
+
+
+
+
+classmethod create_warning (
+
+
+msg : str ,
+* ,
+detail : str | None ,
+location : Location | None ,
+path : tuple [ str , ... ] | None ,
+
+
+) → Self
+Create a Diagnostics object containing a warning.
+
+
+
+
+classmethod from_exception (
+
+
+exc : Exception ,
+* ,
+summary : str ,
+location : Location | None ,
+path : tuple [ str , ... ] | None ,
+explanation : str | None ,
+
+
+) → Self
+Create diagnostics from an exception.
+
+Parameters:
+
+exc – exception to create diagnostics from
+summary – short summary of the error
+location – optional location in the source code where the error occurred
+path – optional path to relevant property in databricks.yml
+explanation – optional explanation to add to the details
+
+
+
+
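+A sketch of wrapping an exception into diagnostics (parse_config is a hypothetical helper; the optional keyword-only arguments location and path are assumed to default to None):
+
+from databricks.bundles.core import Diagnostics
+
+def validate_config(path: str) -> Diagnostics:
+    try:
+        parse_config(path)  # hypothetical helper that raises on invalid input
+        return Diagnostics()
+    except Exception as exc:
+        return Diagnostics.from_exception(
+            exc,
+            summary="Failed to parse job configuration",
+            explanation="Check that the file is valid YAML.",
+        )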
+
+
+
+
+
+class Diagnostic
+
+
+severity : Severity
+Severity of the diagnostics item.
+
+
+
+
+summary : str
+Short summary of the error or warning.
+
+
+
+
+detail : str | None = None
+Explanation of the error or warning.
+
+
+
+
+path : tuple [ str , ... ] | None = None
+Path in databricks.yml where the error or warning occurred.
+
+
+
+
+location : Location | None = None
+Source code location where the error or warning occurred.
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class Location
+
+
+file : str
+
+
+
+
+line : int | None = None
+
+
+
+
+column : int | None = None
+
+
+
+
+static from_callable (
+
+
+fn : Callable ,
+
+
+) → Location | None
+Capture the location of a callable. This is useful for creating
+diagnostics for decorated functions.
+
+
+
+
+static from_stack_frame (
+
+
+depth : int = 0 ,
+
+
+) → Location
+Capture the location of the caller.
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class Severity
+
+
+WARNING = 'warning'
+
+
+
+
+ERROR = 'error'
+
+
+
+
+
+
+class T
+TypeVar for variable value
+
+
+
+
+Methods
+
+
+core. load_resources_from_current_package_module ( ) → Resources
+Load resources from all submodules of the current package module.
+
+
+
+
+core. load_resources_from_module (
+
+
+module : ModuleType ,
+
+
+) → Resources
+Load resources from the given module.
+For recursive loading of resources from submodules, use load_resources_from_package_module .
+
+Parameters:
+module – module to load resources from
+
+
+
+
+
+
+core. load_resources_from_modules (
+
+
+modules : Iterable [ ModuleType ] ,
+
+
+) → Resources
+Load resources from the given modules.
+For recursive loading of resources from submodules, use load_resources_from_package_module .
+
+Parameters:
+modules – list of modules to load resources from
+
+
+
+
+
+
+core. load_resources_from_package_module (
+
+
+package_module : ModuleType ,
+
+
+) → Resources
+Load resources from all submodules of the given package module.
+
+Parameters:
+package_module – package module to load resources from
+
+
+
+
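+As a sketch of wiring these helpers into the load_resources entry point referenced from databricks.yml (the resources package and its submodules are illustrative):
+
+# resources/__init__.py (illustrative package layout)
+from databricks.bundles.core import Bundle, Resources, load_resources_from_modules
+
+from resources import etl_jobs, reporting_jobs  # hypothetical submodules defining jobs
+
+def load_resources(bundle: Bundle) -> Resources:
+    # collect resources defined in the listed modules
+    return load_resources_from_modules([etl_jobs, reporting_jobs])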
+
+
+Decorators
+
+
+@ job_mutator (
+
+
+function : Callable ,
+
+
+) → ResourceMutator [ Job ]
+Decorator for defining a job mutator. The function should return a new instance of the job with the desired changes,
+instead of mutating the input job.
+Example:
+@job_mutator
+def my_job_mutator ( bundle : Bundle , job : Job ) -> Job :
+ return replace ( job , name = "my_job" )
+
+
+
+Parameters:
+function – Function that mutates a job.
+
+
+
+
+
+
+@ variables (
+
+
+cls : type [ _T ] ,
+
+
+) → type [ _T ]
+A decorator that initializes each annotated attribute in a class
+with Variable type. Variables are initialized with a path
+that corresponds to the attribute name. Variables should specify their
+type, or else they will be treated as Any. Complex types
+like data classes, lists or dictionaries are supported.
+For example, if your databricks.yml file contains:
+variables :
+ warehouse_id :
+ description : Warehouse ID for SQL tasks
+ default : ...
+
+
+You can define a class with a warehouse_id attribute:
+@variables
+class MyVariables :
+ warehouse_id : Variable [ str ] # ${var.warehouse_id}
+
+
+And later use it in your code as MyVariables.warehouse_id.
+For accessing bundle variable values, see Bundle.resolve_variable().
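+
+As a sketch combining the reference string and the resolved value (warehouse_id as declared in the databricks.yml example above):
+
+from databricks.bundles.core import Bundle, Resources, Variable, variables
+
+@variables
+class MyVariables:
+    warehouse_id: Variable[str]  # ${var.warehouse_id}
+
+def load_resources(bundle: Bundle) -> Resources:
+    # the reference string, e.g. "${var.warehouse_id}", usable inside configuration
+    reference = MyVariables.warehouse_id.value
+
+    # the value resolved for the selected target
+    resolved = bundle.resolve_variable(MyVariables.warehouse_id)
+
+    return Resources()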
+
\ No newline at end of file
diff --git a/experimental/python/databricks.bundles.jobs.html b/experimental/python/databricks.bundles.jobs.html
new file mode 100644
index 0000000000..57c5765ef7
--- /dev/null
+++ b/experimental/python/databricks.bundles.jobs.html
@@ -0,0 +1,3735 @@
+Jobs — databricks-bundles beta documentation
+
+Jobs
+Package: databricks.bundles.jobs
+
+Classes
+
+
+class Adlsgen2Info
+
+
+destination : str
+abfss destination, e.g. abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name> .
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class AutoScale
+
+
+max_workers : int | None = None
+The maximum number of workers to which the cluster can scale up when overloaded.
+Note that max_workers must be strictly greater than min_workers .
+
+
+
+
+min_workers : int | None = None
+The minimum number of workers to which the cluster can scale down when underutilized.
+It is also the initial number of workers the cluster will have after creation.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class AwsAttributes
+
+
+availability : AwsAvailability | None = None
+
+
+
+
+ebs_volume_count : int | None = None
+The number of volumes launched for each instance. Users can choose up to 10 volumes.
+This feature is only enabled for supported node types. Legacy node types cannot specify
+custom EBS volumes.
+For node types with no instance store, at least one EBS volume needs to be specified;
+otherwise, cluster creation will fail.
+These EBS volumes will be mounted at /ebs0 , /ebs1 , etc.
+Instance store volumes will be mounted at /local_disk0 , /local_disk1 , etc.
+If EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for
+scratch storage because heterogeneously sized scratch devices can lead to inefficient disk
+utilization. If no EBS volumes are attached, Databricks will configure Spark to use instance
+store volumes.
+Please note that if EBS volumes are specified, then the Spark configuration spark.local.dir
+will be overridden.
+
+
+
+
+ebs_volume_iops : int | None = None
+If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.
+
+
+
+
+ebs_volume_size : int | None = None
+The size of each EBS volume (in GiB) launched for each instance. For general purpose
+SSD, this value must be within the range 100 - 4096. For throughput optimized HDD,
+this value must be within the range 500 - 4096.
+
+
+
+
+ebs_volume_throughput : int | None = None
+If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.
+
+
+
+
+ebs_volume_type : EbsVolumeType | None = None
+
+
+
+
+first_on_demand : int | None = None
+The first first_on_demand nodes of the cluster will be placed on on-demand instances.
+If this value is greater than 0, the cluster driver node in particular will be placed on an
+on-demand instance. If this value is greater than or equal to the current cluster size, all
+nodes will be placed on on-demand instances. If this value is less than the current cluster
+size, first_on_demand nodes will be placed on on-demand instances and the remainder will
+be placed on availability instances. Note that this value does not affect
+cluster size and cannot currently be mutated over the lifetime of a cluster.
+
+
+
+
+instance_profile_arn : str | None = None
+Nodes for this cluster will only be placed on AWS instances with this instance profile. If
+omitted, nodes will be placed on instances without an IAM instance profile. The instance
+profile must have previously been added to the Databricks environment by an account
+administrator.
+This feature may only be available to certain customer plans.
+If this field is omitted, we will pull in the default from the conf if it exists.
+
+
+
+
+spot_bid_price_percent : int | None = None
+The bid price for AWS spot instances, as a percentage of the corresponding instance type’s
+on-demand price.
+For example, if this field is set to 50, and the cluster needs a new r3.xlarge spot
+instance, then the bid price is half of the price of
+on-demand r3.xlarge instances. Similarly, if this field is set to 200, the bid price is twice
+the price of on-demand r3.xlarge instances. If not specified, the default value is 100.
+When spot instances are requested for this cluster, only spot instances whose bid price
+percentage matches this field will be considered.
+Note that, for safety, we enforce this field to be no more than 10000.
+The default value and documentation here should be kept consistent with
+CommonConf.defaultSpotBidPricePercent and CommonConf.maxSpotBidPricePercent.
+
+
+
+
+zone_id : str | None = None
+Identifier for the availability zone/datacenter in which the cluster resides.
+This string will be of a form like “us-west-2a”. The provided availability
+zone must be in the same region as the Databricks deployment. For example, “us-west-2a”
+is not a valid zone id if the Databricks deployment resides in the “us-east-1” region.
+This is an optional field at cluster creation, and if not specified, a default zone will be used.
+If the zone specified is “auto”, will try to place cluster in a zone with high availability,
+and will retry placement in a different AZ if there is not enough capacity.
+The list of available zones as well as the default value can be found by using the
+List Zones method.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class AwsAvailability
+Availability type used for all subsequent nodes past the first_on_demand ones.
+Note: If first_on_demand is zero, this availability type will be used for the entire cluster.
+
+
+SPOT = 'SPOT'
+
+
+
+
+ON_DEMAND = 'ON_DEMAND'
+
+
+
+
+SPOT_WITH_FALLBACK = 'SPOT_WITH_FALLBACK'
+
+
+
+
+
+
+class AzureAttributes
+
+
+availability : AzureAvailability | None = None
+
+
+
+
+first_on_demand : int | None = None
+The first first_on_demand nodes of the cluster will be placed on on-demand instances.
+This value should be greater than 0, to make sure the cluster driver node is placed on an
+on-demand instance. If this value is greater than or equal to the current cluster size, all
+nodes will be placed on on-demand instances. If this value is less than the current cluster
+size, first_on_demand nodes will be placed on on-demand instances and the remainder will
+be placed on availability instances. Note that this value does not affect
+cluster size and cannot currently be mutated over the lifetime of a cluster.
+
+
+
+
+log_analytics_info : LogAnalyticsInfo | None = None
+Defines values necessary to configure and run Azure Log Analytics agent
+
+
+
+
+spot_bid_max_price : float | None = None
+The max bid price to be used for Azure spot instances.
+The max price for the bid cannot be higher than the on-demand price of the instance.
+If not specified, the default value is -1, which specifies that the instance cannot be evicted
+on the basis of price, and only on the basis of availability. Further, the value should be > 0 or -1.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class AzureAvailability
+Availability type used for all subsequent nodes past the first_on_demand ones.
+Note: If first_on_demand is zero (which only happens on pool clusters), this availability
+type will be used for the entire cluster.
+
+
+SPOT_AZURE = 'SPOT_AZURE'
+
+
+
+
+ON_DEMAND_AZURE = 'ON_DEMAND_AZURE'
+
+
+
+
+SPOT_WITH_FALLBACK_AZURE = 'SPOT_WITH_FALLBACK_AZURE'
+
+
+
+
+
+
+class ClientsTypes
+
+
+jobs : bool | None = None
+With jobs set, the cluster can be used for jobs
+
+
+
+
+notebooks : bool | None = None
+With notebooks set, this cluster can be used for notebooks
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class ClusterLogConf
+
+
+dbfs : DbfsStorageInfo | None = None
+destination needs to be provided. e.g.
+{ “dbfs” : { “destination” : “dbfs:/home/cluster_log” } }
+
+
+
+
+s3 : S3StorageInfo | None = None
+destination and either the region or endpoint need to be provided. e.g.
+{ “s3”: { “destination” : “s3://cluster_log_bucket/prefix”, “region” : “us-west-2” } }
+Cluster iam role is used to access s3, please make sure the cluster iam role in
+instance_profile_arn has permission to write data to the s3 destination.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class ClusterSpec
+
+
+apply_policy_default_values : bool | None = None
+When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.
+
+
+
+
+autoscale : AutoScale | None = None
+Parameters needed in order to automatically scale clusters up and down based on load.
+Note: autoscaling works best with DB runtime versions 3.0 or later.
+
+
+
+
+autotermination_minutes : int | None = None
+Automatically terminates the cluster after it is inactive for this time in minutes. If not set,
+this cluster will not be automatically terminated. If specified, the threshold must be between
+10 and 10000 minutes.
+Users can also set this value to 0 to explicitly disable automatic termination.
+
+
+
+
+aws_attributes : AwsAttributes | None = None
+Attributes related to clusters running on Amazon Web Services.
+If not specified at cluster creation, a set of default values will be used.
+
+
+
+
+azure_attributes : AzureAttributes | None = None
+Attributes related to clusters running on Microsoft Azure.
+If not specified at cluster creation, a set of default values will be used.
+
+
+
+
+cluster_log_conf : ClusterLogConf | None = None
+The configuration for delivering spark logs to a long-term storage destination.
+Two kinds of destinations (dbfs and s3) are supported. Only one destination can be specified
+for one cluster. If the conf is given, the logs will be delivered to the destination every
+5 mins . The destination of driver logs is $destination/$clusterId/driver , while
+the destination of executor logs is $destination/$clusterId/executor .
+
+
+
+
+cluster_name : str | None = None
+Cluster name requested by the user. This doesn’t have to be unique.
+If not specified at creation, the cluster name will be an empty string.
+
+
+
+
+custom_tags : dict [ str , str ]
+Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
+instances and EBS volumes) with these tags in addition to default_tags . Notes:
+
+Currently, Databricks allows at most 45 custom tags
+Clusters can only reuse cloud resources if the resources’ tags are a subset of the cluster tags
+
+
+
+
+
+data_security_mode : DataSecurityMode | None = None
+
+
+
+
+docker_image : DockerImage | None = None
+
+
+
+
+driver_instance_pool_id : str | None = None
+The optional ID of the instance pool to which the driver of the cluster belongs.
+The pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not
+assigned.
+
+
+
+
+driver_node_type_id : str | None = None
+The node type of the Spark driver. Note that this field is optional;
+if unset, the driver node type will be set as the same value
+as node_type_id defined above.
+
+
+
+
+enable_elastic_disk : bool | None = None
+Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
+space when its Spark workers are running low on disk space. This feature requires specific AWS
+permissions to function correctly - refer to the User Guide for more details.
+
+
+
+
+enable_local_disk_encryption : bool | None = None
+Whether to enable LUKS on cluster VMs’ local disks
+
+
+
+
+gcp_attributes : GcpAttributes | None = None
+Attributes related to clusters running on Google Cloud Platform.
+If not specified at cluster creation, a set of default values will be used.
+
+
+
+
+init_scripts : list [ InitScriptInfo ]
+The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If cluster_log_conf is specified, init script logs are sent to <destination>/<cluster-ID>/init_scripts .
+
+
+
+
+instance_pool_id : str | None = None
+The optional ID of the instance pool to which the cluster belongs.
+
+
+
+
+is_single_node : bool | None = None
+This field can only be used with kind .
+When set to true, Databricks will automatically set single node related custom_tags , spark_conf , and num_workers
+
+
+
+
+node_type_id : str | None = None
+This field encodes, through a single value, the resources available to each of
+the Spark nodes in this cluster. For example, the Spark nodes can be provisioned
+and optimized for memory or compute intensive workloads. A list of available node
+types can be retrieved by using the clusters/listNodeTypes API call.
+
+
+
+
+num_workers : int | None = None
+Number of worker nodes that this cluster should have. A cluster has one Spark Driver
+and num_workers Executors for a total of num_workers + 1 Spark nodes.
+Note: When reading the properties of a cluster, this field reflects the desired number
+of workers rather than the actual current number of workers. For instance, if a cluster
+is resized from 5 to 10 workers, this field will immediately be updated to reflect
+the target size of 10 workers, whereas the workers listed in spark_info will gradually
+increase from 5 to 10 as the new nodes are provisioned.
+
+
+
+
+policy_id : str | None = None
+The ID of the cluster policy used to create the cluster if applicable.
+
+
+
+
+runtime_engine : RuntimeEngine | None = None
+
+
+
+
+single_user_name : str | None = None
+Single user name if data_security_mode is SINGLE_USER
+
+
+
+
+spark_conf : dict [ str , str ]
+An object containing a set of optional, user-specified Spark configuration key-value pairs.
+Users can also pass in a string of extra JVM options to the driver and the executors via
+spark.driver.extraJavaOptions and spark.executor.extraJavaOptions respectively.
+
+
+
+
+spark_env_vars : dict [ str , str ]
+An object containing a set of optional, user-specified environment variable key-value pairs.
+Please note that key-value pair of the form (X,Y) will be exported as is (i.e.,
+export X=’Y’ ) while launching the driver and workers.
+In order to specify an additional set of SPARK_DAEMON_JAVA_OPTS , we recommend appending
+them to $SPARK_DAEMON_JAVA_OPTS as shown in the example below. This ensures that all
+default databricks managed environmental variables are included as well.
+Example Spark environment variables:
+{“SPARK_WORKER_MEMORY”: “28000m”, “SPARK_LOCAL_DIRS”: “/local_disk0”} or
+{“SPARK_DAEMON_JAVA_OPTS”: “$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true”}
+
+
+
+
+spark_version : str | None = None
+The Spark version of the cluster, e.g. 3.3.x-scala2.11 .
+A list of available Spark versions can be retrieved by using
+the clusters/sparkVersions API call.
+
+
+
+
+ssh_public_keys : list [ str ]
+SSH public key contents that will be added to each Spark node in this cluster. The
+corresponding private keys can be used to login with the user name ubuntu on port 2200 .
+Up to 10 keys can be specified.
+
+
+
+
+use_ml_runtime : bool | None = None
+This field can only be used with kind .
+effective_spark_version is determined by spark_version (DBR release), this field use_ml_runtime , and whether node_type_id is a GPU node or not.
+
+
+
+
+workload_type : WorkloadType | None = None
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
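+For illustration, a minimal ClusterSpec sketch (field values are placeholders; this assumes the dataclass fields can be passed as keyword arguments):
+
+from databricks.bundles.jobs import AutoScale, ClusterSpec
+
+cluster = ClusterSpec(
+    spark_version="14.3.x-scala2.12",   # placeholder DBR version
+    node_type_id="i3.xlarge",           # placeholder node type
+    autoscale=AutoScale(min_workers=1, max_workers=4),
+    autotermination_minutes=30,
+)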
+
+
+
+
+class Condition
+
+
+ANY_UPDATED = 'ANY_UPDATED'
+
+
+
+
+ALL_UPDATED = 'ALL_UPDATED'
+
+
+
+
+
+
+class ConditionTask
+
+
+left : str
+The left operand of the condition task. Can be either a string value or a job state or parameter reference.
+
+
+
+
+op : ConditionTaskOp
+
+EQUAL_TO , NOT_EQUAL operators perform string comparison of their operands. This means that “12.0” == “12” will evaluate to false .
+GREATER_THAN , GREATER_THAN_OR_EQUAL , LESS_THAN , LESS_THAN_OR_EQUAL operators perform numeric comparison of their operands. “12.0” >= “12” will evaluate to true , “10.0” >= “12” will evaluate to false .
+
+The boolean comparison to task values can be implemented with operators EQUAL_TO , NOT_EQUAL . If a task value was set to a boolean value, it will be serialized to “true” or “false” for the comparison.
+
+
+
+
+right : str
+The right operand of the condition task. Can be either a string value or a job state or parameter reference.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class ConditionTaskOp
+
+EQUAL_TO , NOT_EQUAL operators perform string comparison of their operands. This means that “12.0” == “12” will evaluate to false .
+GREATER_THAN , GREATER_THAN_OR_EQUAL , LESS_THAN , LESS_THAN_OR_EQUAL operators perform numeric comparison of their operands. “12.0” >= “12” will evaluate to true , “10.0” >= “12” will evaluate to false .
+
+The boolean comparison to task values can be implemented with operators EQUAL_TO , NOT_EQUAL . If a task value was set to a boolean value, it will be serialized to “true” or “false” for the comparison.
+
+
+EQUAL_TO = 'EQUAL_TO'
+
+
+
+
+GREATER_THAN = 'GREATER_THAN'
+
+
+
+
+GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL'
+
+
+
+
+LESS_THAN = 'LESS_THAN'
+
+
+
+
+LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL'
+
+
+
+
+NOT_EQUAL = 'NOT_EQUAL'
+
+
+
+
+
+
+class Continuous
+
+
+pause_status : PauseStatus | None = None
+Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class CronSchedule
+
+
+quartz_cron_expression : str
+A Cron expression using Quartz syntax that describes the schedule for a job. See Cron Trigger for details. This field is required.
+
+
+
+
+pause_status : PauseStatus | None = None
+Indicate whether this schedule is paused or not.
+
+
+
+
+timezone_id : str = 'UTC'
+A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See Java TimeZone for details. This field is required.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class DataSecurityMode
+Data security mode decides what data governance model to use when accessing data
+from a cluster.
+The following modes can only be used with kind .
+* DATA_SECURITY_MODE_AUTO : Databricks will choose the most appropriate access mode depending on your compute configuration.
+* DATA_SECURITY_MODE_STANDARD : Alias for USER_ISOLATION .
+* DATA_SECURITY_MODE_DEDICATED : Alias for SINGLE_USER .
+The following modes can be used regardless of kind .
+* NONE : No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode.
+* SINGLE_USER : A secure cluster that can only be exclusively used by a single user specified in single_user_name . Most programming languages, cluster features and data governance features are available in this mode.
+* USER_ISOLATION : A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other’s data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited.
+The following modes are deprecated starting with Databricks Runtime 15.0 and
+will be removed for future Databricks Runtime versions:
+
+LEGACY_TABLE_ACL : This mode is for users migrating from legacy Table ACL clusters.
+LEGACY_PASSTHROUGH : This mode is for users migrating from legacy Passthrough on high concurrency clusters.
+LEGACY_SINGLE_USER : This mode is for users migrating from legacy Passthrough on standard clusters.
+LEGACY_SINGLE_USER_STANDARD : This mode provides a way that doesn’t have UC nor passthrough enabled.
+
+
+
+DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO'
+
+
+
+
+DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD'
+
+
+
+
+DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED'
+
+
+
+
+NONE = 'NONE'
+
+
+
+
+SINGLE_USER = 'SINGLE_USER'
+
+
+
+
+USER_ISOLATION = 'USER_ISOLATION'
+
+
+
+
+LEGACY_TABLE_ACL = 'LEGACY_TABLE_ACL'
+
+
+
+
+LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH'
+
+
+
+
+LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER'
+
+
+
+
+LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD'
+
+
+
+
+
+
+class DbfsStorageInfo
+
+
+destination : str
+dbfs destination, e.g. dbfs:/my/path
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class DbtTask
+
+
+commands : list [ str ]
+A list of dbt commands to execute. All commands must start with dbt . This parameter must not be empty. A maximum of up to 10 commands can be provided.
+
+
+
+
+catalog : str | None = None
+Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks >= 1.1.1.
+
+
+
+
+profiles_directory : str | None = None
+Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.
+
+
+
+
+project_directory : str | None = None
+Path to the project directory. Optional for Git sourced tasks, in which
+case if no value is provided, the root of the Git repository is used.
+
+
+
+
+schema : str | None = None
+Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the default schema is used.
+
+
+
+
+source : Source | None = None
+Optional location type of the project directory. When set to WORKSPACE , the project will be retrieved
+from the local Databricks workspace. When set to GIT , the project will be retrieved from a Git repository
+defined in git_source . If the value is empty, the task will use GIT if git_source is defined and WORKSPACE otherwise.
+
+
+
+
+
+warehouse_id : str | None = None
+ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the --profiles-dir command line argument.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
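+For illustration, a minimal DbtTask sketch (the warehouse id and schema are placeholders):
+
+from databricks.bundles.jobs import DbtTask
+
+dbt_task = DbtTask(
+    commands=["dbt deps", "dbt run"],   # every command must start with "dbt"
+    warehouse_id="<sql-warehouse-id>",  # placeholder
+    schema="analytics",                 # placeholder target schema
+)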
+
+
+
+
+class DockerBasicAuth
+
+
+password : str | None = None
+Password of the user
+
+
+
+
+username : str | None = None
+Name of the user
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class DockerImage
+
+
+basic_auth : DockerBasicAuth | None = None
+
+
+
+
+url : str | None = None
+URL of the docker image.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class EbsVolumeType
+The type of EBS volumes that will be launched with this cluster.
+
+
+GENERAL_PURPOSE_SSD = 'GENERAL_PURPOSE_SSD'
+
+
+
+
+THROUGHPUT_OPTIMIZED_HDD = 'THROUGHPUT_OPTIMIZED_HDD'
+
+
+
+
+
+
+class Environment
+The environment entity used to preserve the serverless environment side panel and jobs’ environment for non-notebook tasks.
+In this minimal environment spec, only pip dependencies are supported.
+
+
+client : str
+Client version used by the environment
+The client is the user-facing environment of the runtime.
+Each client comes with a specific set of pre-installed libraries.
+The version is a string, consisting of the major client version.
+
+
+
+
+dependencies : list [ str ]
+List of pip dependencies, as supported by the version of pip in this environment.
+Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/
+Allowed dependency could be <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in Databricks), <vcs project url>
+E.g. dependencies: [“foo==0.0.1”, “-r /Workspace/test/requirements.txt”]
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
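+For illustration, a minimal Environment sketch (the client version string is illustrative; the dependencies mirror the examples above):
+
+from databricks.bundles.jobs import Environment
+
+env = Environment(
+    client="1",  # major client version as a string
+    dependencies=["foo==0.0.1", "-r /Workspace/test/requirements.txt"],
+)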
+
+
+
+
+class FileArrivalTriggerConfiguration
+
+
+url : str
+URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.
+
+
+
+
+min_time_between_triggers_seconds : int | None = None
+If set, the trigger starts a run only after the specified amount of time passed since
+the last time the trigger fired. The minimum allowed value is 60 seconds
+
+
+
+
+wait_after_last_change_seconds : int | None = None
+If set, the trigger starts a run only after no file activity has occurred for the specified amount of time.
+This makes it possible to wait for a batch of incoming files to arrive before triggering a run. The
+minimum allowed value is 60 seconds.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class ForEachTask
+
+
+inputs : str
+Array for task to iterate on. This can be a JSON string or a reference to
+an array parameter.
+
+
+
+
+task : Task
+Configuration for the task that will be run for each element in the array
+
+
+
+
+concurrency : int | None = None
+An optional maximum allowed number of concurrent runs of the task.
+Set this value if you want to be able to execute multiple runs of the task concurrently.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class GcpAttributes
+
+
+availability : GcpAvailability | None = None
+
+
+
+
+boot_disk_size : int | None = None
+boot disk size in GB
+
+
+
+
+google_service_account : str | None = None
+If provided, the cluster will impersonate the google service account when accessing
+gcloud services (like GCS). The google service account
+must have previously been added to the Databricks environment by an account
+administrator.
+
+
+
+
+local_ssd_count : int | None = None
+If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached. Each local SSD is 375GB in size. Refer to GCP documentation for the supported number of local SSDs for each instance type.
+
+
+
+
+use_preemptible_executors : bool | None = None
+This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+Note: Soon to be deprecated, use the availability field instead.
+
+
+
+
+zone_id : str | None = None
+Identifier for the availability zone in which the cluster resides.
+This can be one of the following:
+- “HA” => High availability, spread nodes across availability zones for a Databricks deployment region [default]
+- “AUTO” => Databricks picks an availability zone to schedule the cluster on.
+- A GCP availability zone => Pick One of the available zones for (machine type + region) from https://cloud.google.com/compute/docs/regions-zones .
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class GcpAvailability
+This field determines whether the instance pool will contain preemptible
+VMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.
+
+
+PREEMPTIBLE_GCP = 'PREEMPTIBLE_GCP'
+
+
+
+
+ON_DEMAND_GCP = 'ON_DEMAND_GCP'
+
+
+
+
+PREEMPTIBLE_WITH_FALLBACK_GCP = 'PREEMPTIBLE_WITH_FALLBACK_GCP'
+
+
+
+
+
+
+class GcsStorageInfo
+
+
+destination : str
+GCS destination/URI, e.g. gs://my-bucket/some-prefix
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class GitProvider
+
+
+GIT_HUB = 'gitHub'
+
+
+
+
+BITBUCKET_CLOUD = 'bitbucketCloud'
+
+
+
+
+AZURE_DEV_OPS_SERVICES = 'azureDevOpsServices'
+
+
+
+
+GIT_HUB_ENTERPRISE = 'gitHubEnterprise'
+
+
+
+
+BITBUCKET_SERVER = 'bitbucketServer'
+
+
+
+
+GIT_LAB = 'gitLab'
+
+
+
+
+GIT_LAB_ENTERPRISE_EDITION = 'gitLabEnterpriseEdition'
+
+
+
+
+AWS_CODE_COMMIT = 'awsCodeCommit'
+
+
+
+
+
+
+class GitSnapshot
+Read-only state of the remote repository at the time the job was run. This field is only included on job runs.
+
+
+used_commit : str | None = None
+Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class GitSource
+An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
+If git_source is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting source to WORKSPACE on the task.
+Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, git_source must be defined on the job.
+
+
+git_provider : GitProvider
+Unique identifier of the service used to host the Git repository. The value is case insensitive.
+
+
+
+
+git_url : str
+URL of the repository to be cloned by this job.
+
+
+
+
+git_branch : str | None = None
+Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.
+
+
+
+
+git_commit : str | None = None
+Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.
+
+
+
+
+git_tag : str | None = None
+Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
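+For illustration, a minimal GitSource sketch (the repository URL is a placeholder):
+
+from databricks.bundles.jobs import GitProvider, GitSource
+
+git_source = GitSource(
+    git_url="https://github.com/example-org/example-repo",  # placeholder
+    git_provider=GitProvider.GIT_HUB,
+    git_branch="main",
+)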
+
+
+
+class InitScriptInfo
+
+
+abfss : Adlsgen2Info | None = None
+destination needs to be provided. e.g.
+{ “abfss” : { “destination” : “abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>” } }
+
+
+
+
+dbfs : DbfsStorageInfo | None = None
+destination needs to be provided. e.g.
+{ “dbfs” : { “destination” : “dbfs:/home/cluster_log” } }
+
+
+
+
+file : LocalFileInfo | None = None
+destination needs to be provided. e.g.
+{ “file” : { “destination” : “file:/my/local/file.sh” } }
+
+
+
+
+gcs : GcsStorageInfo | None = None
+destination needs to be provided. e.g.
+{ “gcs”: { “destination”: “gs://my-bucket/file.sh” } }
+
+
+
+
+s3 : S3StorageInfo | None = None
+destination and either the region or endpoint need to be provided. e.g.
+{ “s3”: { “destination” : “s3://cluster_log_bucket/prefix”, “region” : “us-west-2” } }
+Cluster iam role is used to access s3, please make sure the cluster iam role in
+instance_profile_arn has permission to write data to the s3 destination.
+
+
+
+
+volumes : VolumesStorageInfo | None = None
+destination needs to be provided. e.g.
+{ “volumes” : { “destination” : “/Volumes/my-init.sh” } }
+
+
+
+
+workspace : WorkspaceStorageInfo | None = None
+destination needs to be provided. e.g.
+{ “workspace” : { “destination” : “/Users/user1@databricks.com/my-init.sh” } }
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
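+For illustration, a minimal InitScriptInfo sketch using a DBFS destination (the path is a placeholder):
+
+from databricks.bundles.jobs import DbfsStorageInfo, InitScriptInfo
+
+init_script = InitScriptInfo(
+    dbfs=DbfsStorageInfo(destination="dbfs:/home/cluster_log/my-init.sh"),  # placeholder path
+)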
+
+
+
+
+class Job
+
+
+budget_policy_id : str | None = None
+The id of the user-specified budget policy to use for this job.
+If not specified, a default budget policy may be applied when creating or modifying the job.
+See effective_budget_policy_id for the budget policy used by this workload.
+
+
+
+
+continuous : Continuous | None = None
+An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of schedule and continuous can be used.
+
+
+
+
+description : str | None = None
+An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
+
+
+
+
+email_notifications : JobEmailNotifications | None = None
+An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.
+
+
+
+
+environments : list [ JobEnvironment ]
+A list of task execution environment specifications that can be referenced by serverless tasks of this job.
+An environment is required to be present for serverless tasks.
+For serverless notebook tasks, the environment is accessible in the notebook environment panel.
+For other serverless tasks, the task environment is required to be specified using environment_key in the task settings.
+
+
+
+
+git_source : GitSource | None = None
+An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
+If git_source is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting source to WORKSPACE on the task.
+Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, git_source must be defined on the job.
+
+
+
+
+health : JobsHealthRules | None = None
+
+
+
+
+job_clusters : list [ JobCluster ]
+A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+
+
+
+
+max_concurrent_runs : int | None = None
+An optional maximum allowed number of concurrent runs of the job.
+Set this value if you want to be able to execute multiple runs of the same job concurrently.
+This is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.
+This setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs.
+However, from then on, new runs are skipped unless there are fewer than 3 active runs.
+This value cannot exceed 1000. Setting this value to 0 causes all new runs to be skipped.
+
+
+
+
+name : str | None = None
+An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.
+
+
+
+
+notification_settings : JobNotificationSettings | None = None
+Optional notification settings that are used when sending notifications to each of the email_notifications and webhook_notifications for this job.
+
+
+
+
+parameters : list [ JobParameterDefinition ]
+Job-level parameter definitions
+
+
+
+
+permissions : list [ Permission ]
+
+
+
+
+queue : QueueSettings | None = None
+The queue settings of the job.
+
+
+
+
+run_as : JobRunAs | None = None
+
+
+
+
+schedule : CronSchedule | None = None
+An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to runNow .
+
+
+
+
+tags : dict [ str , str ]
+A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.
+
+
+
+
+tasks : list [ Task ]
+A list of task specifications to be executed by this job.
+
+
+
+
+timeout_seconds : int | None = None
+An optional timeout applied to each run of this job. A value of 0 means no timeout.
+
+
+
+
+trigger : TriggerSettings | None = None
+A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to runNow .
+
+
+
+
+webhook_notifications : WebhookNotifications | None = None
+A collection of system notification IDs to notify when runs of this job begin or complete.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
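+
+A minimal sketch of constructing a Job with the fields listed above (assuming the dataclasses accept the documented fields as keyword arguments; names and paths are illustrative):
+
+from databricks.bundles.jobs import Job, NotebookTask, Task
+
+# A nightly job with a single notebook task.
+job = Job(
+    name="nightly-etl",
+    max_concurrent_runs=1,
+    tags={"team": "data-eng"},
+    tasks=[
+        Task(
+            task_key="ingest",
+            notebook_task=NotebookTask(notebook_path="/Workspace/etl/ingest"),
+        ),
+    ],
+)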
+
+
+
+
+
+
+class JobCluster
+
+
+job_cluster_key : str
+A unique name for the job cluster. This field is required and must be unique within the job.
+JobTaskSettings may refer to this field to determine which cluster to launch for the task execution.
+
+
+
+
+new_cluster : ClusterSpec
+If new_cluster, a description of a cluster that is created for each task.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class JobEmailNotifications
+
+
+no_alert_for_skipped_runs : bool | None = None
+If true, do not send email to recipients specified in on_failure if the run is skipped.
+This field is deprecated . Please use the notification_settings.no_alert_for_skipped_runs field.
+
+
+
+
+on_duration_warning_threshold_exceeded : list [ str ]
+A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the RUN_DURATION_SECONDS metric in the health field. If no rule for the RUN_DURATION_SECONDS metric is specified in the health field for the job, notifications are not sent.
+
+
+
+
+on_failure : list [ str ]
+A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an INTERNAL_ERROR life_cycle_state or a FAILED , or TIMED_OUT result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.
+
+
+
+
+on_start : list [ str ]
+A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.
+
+
+
+
+on_streaming_backlog_exceeded : list [ str ]
+A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.
+Streaming backlog thresholds can be set in the health field using the following metrics: STREAMING_BACKLOG_BYTES , STREAMING_BACKLOG_RECORDS , STREAMING_BACKLOG_SECONDS , or STREAMING_BACKLOG_FILES .
+Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.
+
+
+
+
+on_success : list [ str ]
+A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a TERMINATED life_cycle_state and a SUCCESS result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
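+
+For instance, a sketch of job-level email notifications using the fields above (keyword-argument construction assumed; addresses are placeholders):
+
+from databricks.bundles.jobs import JobEmailNotifications
+
+# Notify the on-call alias on failure and the team alias on success.
+notifications = JobEmailNotifications(
+    on_failure=["oncall@example.com"],
+    on_success=["data-eng@example.com"],
+)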
+
+
+
+
+
+
+class JobEnvironment
+
+
+environment_key : str
+The key of an environment. It has to be unique within a job.
+
+
+
+
+spec : Environment | None = None
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class JobNotificationSettings
+
+
+no_alert_for_canceled_runs : bool | None = None
+If true, do not send notifications to recipients specified in on_failure if the run is canceled.
+
+
+
+
+no_alert_for_skipped_runs : bool | None = None
+If true, do not send notifications to recipients specified in on_failure if the run is skipped.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class JobParameterDefinition
+
+
+default : str
+Default value of the parameter.
+
+
+
+
+name : str
+The name of the defined parameter. May only contain alphanumeric characters, underscores (_), hyphens (-), and periods (.).
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class JobRunAs
+Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job.
+Either user_name or service_principal_name should be specified. If not, an error is thrown.
+
+
+service_principal_name : str | None = None
+Application ID of an active service principal. Setting this field requires the servicePrincipal/user role.
+
+
+
+
+user_name : str | None = None
+The email of an active workspace user. Non-admin users can only set this field to their own email.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
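+
+A sketch of running a job as a service principal (the application ID below is a placeholder; set either user_name or service_principal_name, not both):
+
+from databricks.bundles.jobs import JobRunAs
+
+run_as = JobRunAs(service_principal_name="00000000-0000-0000-0000-000000000000")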
+
+
+
+
+
+
+class JobsHealthMetric
+Specifies the health metric that is being evaluated for a particular health rule.
+
+RUN_DURATION_SECONDS : Expected total time for a run in seconds.
+STREAMING_BACKLOG_BYTES : An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview.
+STREAMING_BACKLOG_RECORDS : An estimate of the maximum offset lag across all streams. This metric is in Public Preview.
+STREAMING_BACKLOG_SECONDS : An estimate of the maximum consumer delay across all streams. This metric is in Public Preview.
+STREAMING_BACKLOG_FILES : An estimate of the maximum number of outstanding files across all streams. This metric is in Public Preview.
+
+
+
+RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS'
+
+
+
+
+STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES'
+
+
+
+
+STREAMING_BACKLOG_RECORDS = 'STREAMING_BACKLOG_RECORDS'
+
+
+
+
+STREAMING_BACKLOG_SECONDS = 'STREAMING_BACKLOG_SECONDS'
+
+
+
+
+STREAMING_BACKLOG_FILES = 'STREAMING_BACKLOG_FILES'
+
+
+
+
+
+
+class JobsHealthOperator
+Specifies the operator used to compare the health metric value with the specified threshold.
+
+
+GREATER_THAN = 'GREATER_THAN'
+
+
+
+
+
+
+class JobsHealthRule
+
+
+metric : JobsHealthMetric
+
+
+
+
+op : JobsHealthOperator
+
+
+
+
+value : int
+Specifies the threshold value that the health metric should obey to satisfy the health rule.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class JobsHealthRules
+An optional set of health rules that can be defined for this job.
+
+
+rules : list [ JobsHealthRule ]
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
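+
+A sketch of a health rule that alerts when a run takes longer than one hour, using the enums documented above:
+
+from databricks.bundles.jobs import (
+    JobsHealthMetric,
+    JobsHealthOperator,
+    JobsHealthRule,
+    JobsHealthRules,
+)
+
+# Alert when RUN_DURATION_SECONDS exceeds 3600 seconds (one hour).
+health = JobsHealthRules(
+    rules=[
+        JobsHealthRule(
+            metric=JobsHealthMetric.RUN_DURATION_SECONDS,
+            op=JobsHealthOperator.GREATER_THAN,
+            value=3600,
+        ),
+    ],
+)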
+
+
+
+
+
+
+class Library
+
+
+cran : RCranLibrary | None = None
+Specification of a CRAN library to be installed as part of the library
+
+
+
+
+egg : str | None = None
+Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+
+
+
+
+jar : str | None = None
+URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
+For example: { “jar”: “/Workspace/path/to/library.jar” } , { “jar” : “/Volumes/path/to/library.jar” } or
+{ “jar”: “s3://my-bucket/library.jar” } .
+If S3 is used, please make sure the cluster has read access on the library. You may need to
+launch the cluster with an IAM role to access the S3 URI.
+
+
+
+
+maven : MavenLibrary | None = None
+Specification of a maven library to be installed. For example:
+{ “coordinates”: “org.jsoup:jsoup:1.7.2” }
+
+
+
+
+pypi : PythonPyPiLibrary | None = None
+Specification of a PyPi library to be installed. For example:
+{ “package”: “simplejson” }
+
+
+
+
+requirements : str | None = None
+URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported.
+For example: { “requirements”: “/Workspace/path/to/requirements.txt” } or { “requirements” : “/Volumes/path/to/requirements.txt” }
+
+
+
+
+whl : str | None = None
+URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
+For example: { “whl”: “/Workspace/path/to/library.whl” } , { “whl” : “/Volumes/path/to/library.whl” } or
+{ “whl”: “s3://my-bucket/library.whl” } .
+If S3 is used, please make sure the cluster has read access on the library. You may need to
+launch the cluster with an IAM role to access the S3 URI.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
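+
+A sketch of two common library specifications (a PyPI package and a workspace wheel), assuming keyword-argument construction; paths are illustrative:
+
+from databricks.bundles.jobs import Library, PythonPyPiLibrary
+
+# A PyPI package pinned to an exact version.
+pypi_library = Library(pypi=PythonPyPiLibrary(package="simplejson==3.8.0"))
+
+# A wheel stored in the workspace.
+wheel_library = Library(whl="/Workspace/path/to/library.whl")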
+
+
+
+
+
+
+class LocalFileInfo
+
+
+destination : str
+local file destination, e.g. file:/my/local/file.sh
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class LogAnalyticsInfo
+
+
+log_analytics_primary_key : str | None = None
+<needs content added>
+
+
+
+
+log_analytics_workspace_id : str | None = None
+<needs content added>
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class MavenLibrary
+
+
+coordinates : str
+Gradle-style maven coordinates. For example: “org.jsoup:jsoup:1.7.2”.
+
+
+
+
+exclusions : list [ str ]
+List of dependencies to exclude. For example: [“slf4j:slf4j”, “*:hadoop-client”] .
+Maven dependency exclusions:
+https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html .
+
+
+
+
+repo : str | None = None
+Maven repo to install the Maven package from. If omitted, both Maven Central Repository
+and Spark Packages are searched.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class NotebookTask
+
+
+notebook_path : str
+The path of the notebook to be run in the Databricks workspace or remote repository.
+For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.
+For notebooks stored in a remote repository, the path must be relative. This field is required.
+
+
+
+
+base_parameters : dict [ str , str ]
+Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/runNow
+with parameters specified, the two parameters maps are merged. If the same key is specified in
+base_parameters and in run-now , the value from run-now is used.
+Use Task parameter variables to set parameters containing information about job runs.
+If the notebook takes a parameter that is not specified in the job’s base_parameters or the run-now override parameters,
+the default value from the notebook is used.
+Retrieve these parameters in a notebook using dbutils.widgets.get .
+The JSON representation of this field cannot exceed 1MB.
+
+
+
+
+source : Source | None = None
+Optional location type of the notebook. When set to WORKSPACE , the notebook will be retrieved from the local Databricks workspace. When set to GIT , the notebook will be retrieved from a Git repository
+defined in git_source . If the value is empty, the task will use GIT if git_source is defined and WORKSPACE otherwise.
+* WORKSPACE : Notebook is located in Databricks workspace.
+* GIT : Notebook is located in cloud Git provider.
+
+
+
+
+warehouse_id : str | None = None
+Optional warehouse_id to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses.
+Note that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
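+
+A sketch of a notebook task with base parameters (the workspace path and parameter names are illustrative; keyword-argument construction assumed):
+
+from databricks.bundles.jobs import NotebookTask, Source
+
+notebook_task = NotebookTask(
+    notebook_path="/Workspace/etl/ingest",
+    base_parameters={"date": "2024-01-01"},
+    source=Source.WORKSPACE,
+)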
+
+
+
+
+
+
+class PauseStatus
+
+
+UNPAUSED = 'UNPAUSED'
+
+
+
+
+PAUSED = 'PAUSED'
+
+
+
+
+
+
+class PeriodicTriggerConfiguration
+
+
+interval : int
+The interval at which the trigger should run.
+
+
+
+
+unit : PeriodicTriggerConfigurationTimeUnit
+The unit of time for the interval.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class PeriodicTriggerConfigurationTimeUnit
+
+
+HOURS = 'HOURS'
+
+
+
+
+DAYS = 'DAYS'
+
+
+
+
+WEEKS = 'WEEKS'
+
+
+
+
+
+
+class Permission
+
+
+level : str
+The permission level granted to the user, group, or service principal defined in this permission.
+
+
+
+
+group_name : str | None = None
+The name of the group that has the permission set in level.
+
+
+
+
+service_principal_name : str | None = None
+The name of the service principal that has the permission set in level.
+
+
+
+
+user_name : str | None = None
+The name of the user that has the permission set in level.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class PipelineTask
+
+
+pipeline_id : str
+The full name of the pipeline task to execute.
+
+
+
+
+full_refresh : bool | None = None
+If true, triggers a full refresh on the delta live table.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class PythonPyPiLibrary
+
+
+package : str
+The name of the pypi package to install. An optional exact version specification is also
+supported. Examples: “simplejson” and “simplejson==3.8.0”.
+
+
+
+
+repo : str | None = None
+The repository where the package can be found. If not specified, the default pip index is
+used.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class PythonWheelTask
+
+
+entry_point : str
+Named entry point to use. If it does not exist in the metadata of the package, the function from the package is executed directly using $packageName.$entryPoint() .
+
+
+
+
+package_name : str
+Name of the package to execute
+
+
+
+
+named_parameters : dict [ str , str ]
+Command-line parameters passed to the Python wheel task in the form of [“--name=task”, “--data=dbfs:/path/to/data.json”] . Leave it empty if parameters is not null.
+
+
+
+
+parameters : list [ str ]
+Command-line parameters passed to Python wheel task. Leave it empty if named_parameters is not null.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
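+
+A sketch of a wheel task (package name and arguments are illustrative); use either parameters or named_parameters, not both:
+
+from databricks.bundles.jobs import PythonWheelTask
+
+wheel_task = PythonWheelTask(
+    package_name="my_package",
+    entry_point="main",
+    parameters=["--data=dbfs:/path/to/data.json"],
+)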
+
+
+
+
+
+
+class QueueSettings
+
+
+enabled : bool
+If true, enable queueing for the job. This is a required field.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class RCranLibrary
+
+
+package : str
+The name of the CRAN package to install.
+
+
+
+
+repo : str | None = None
+The repository where the package can be found. If not specified, the default CRAN repo is used.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class RunIf
+An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to ALL_SUCCESS .
+Possible values are:
+* ALL_SUCCESS : All dependencies have executed and succeeded
+* AT_LEAST_ONE_SUCCESS : At least one dependency has succeeded
+* NONE_FAILED : None of the dependencies have failed and at least one was executed
+* ALL_DONE : All dependencies have been completed
+* AT_LEAST_ONE_FAILED : At least one dependency failed
+* ALL_FAILED : All dependencies have failed
+
+
+ALL_SUCCESS = 'ALL_SUCCESS'
+
+
+
+
+ALL_DONE = 'ALL_DONE'
+
+
+
+
+NONE_FAILED = 'NONE_FAILED'
+
+
+
+
+AT_LEAST_ONE_SUCCESS = 'AT_LEAST_ONE_SUCCESS'
+
+
+
+
+ALL_FAILED = 'ALL_FAILED'
+
+
+
+
+AT_LEAST_ONE_FAILED = 'AT_LEAST_ONE_FAILED'
+
+
+
+
+
+
+class RunJobTask
+
+
+job_id : int
+ID of the job to trigger.
+
+
+
+
+job_parameters : dict [ str , str ]
+Job-level parameters used to trigger the job.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class RuntimeEngine
+Determines the cluster’s runtime engine, either standard or Photon.
+This field is not compatible with legacy spark_version values that contain -photon- .
+Remove -photon- from the spark_version and set runtime_engine to PHOTON .
+If left unspecified, the runtime engine defaults to standard unless the spark_version
+contains -photon-, in which case Photon will be used.
+
+
+NULL = 'NULL'
+
+
+
+
+STANDARD = 'STANDARD'
+
+
+
+
+PHOTON = 'PHOTON'
+
+
+
+
+
+
+class S3StorageInfo
+
+
+destination : str
+S3 destination, e.g. s3://my-bucket/some-prefix . Note that logs will be delivered using the
+cluster IAM role; please make sure you set the cluster IAM role and that the role has write access to the
+destination. Please also note that you cannot use AWS keys to deliver logs.
+
+
+
+
+canned_acl : str | None = None
+(Optional) Set canned access control list for the logs, e.g. bucket-owner-full-control .
+If canned_acl is set, please make sure the cluster IAM role has s3:PutObjectAcl permission on
+the destination bucket and prefix. The full list of possible canned ACLs can be found at
+http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl .
+Please also note that by default only the object owner gets full control. If you are using a cross-account
+role for writing data, you may want to set bucket-owner-full-control to make the bucket owner able to
+read the logs.
+
+
+
+
+enable_encryption : bool | None = None
+(Optional) Flag to enable server side encryption, false by default.
+
+
+
+
+encryption_type : str | None = None
+(Optional) The encryption type. It can be either sse-s3 or sse-kms . It is used only when
+encryption is enabled, and the default type is sse-s3 .
+
+
+
+
+endpoint : str | None = None
+S3 endpoint, e.g. https://s3-us-west-2.amazonaws.com . Either region or endpoint needs to be set.
+If both are set, endpoint will be used.
+
+
+
+
+kms_key : str | None = None
+(Optional) Kms key which will be used if encryption is enabled and encryption type is set to sse-kms .
+
+
+
+
+region : str | None = None
+S3 region, e.g. us-west-2 . Either region or endpoint needs to be set. If both are set,
+endpoint will be used.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class Source
+Optional location type of the SQL file. When set to WORKSPACE , the SQL file will be retrieved from the local Databricks workspace. When set to GIT , the SQL file will be retrieved from a Git repository
+defined in git_source . If the value is empty, the task will use GIT if git_source is defined and WORKSPACE otherwise.
+
+
+
+WORKSPACE = 'WORKSPACE'
+
+
+
+
+GIT = 'GIT'
+
+
+
+
+
+
+class SparkJarTask
+
+
+main_class_name : str
+The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library.
+The code must use SparkContext.getOrCreate to obtain a Spark context; otherwise, runs of the job fail.
+
+
+
+
+jar_uri : str | None = None
+Deprecated since 04/2016. Provide a jar through the libraries field instead. For an example, see :method:jobs/create.
+
+
+
+
+parameters : list [ str ]
+Parameters passed to the main method.
+Use Task parameter variables to set parameters containing information about job runs.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SparkPythonTask
+
+
+python_file : str
+The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with / . For files stored in a remote repository, the path must be relative. This field is required.
+
+
+
+
+parameters : list [ str ]
+Command line parameters passed to the Python file.
+Use Task parameter variables to set parameters containing information about job runs.
+
+
+
+
+source : Source | None = None
+Optional location type of the Python file. When set to WORKSPACE or not specified, the file will be retrieved from the local
+Databricks workspace or cloud location (if the python_file has a URI format). When set to GIT ,
+the Python file will be retrieved from a Git repository defined in git_source .
+
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SparkSubmitTask
+
+
+parameters : list [ str ]
+Command-line parameters passed to spark submit.
+Use Task parameter variables to set parameters containing information about job runs.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SqlTask
+
+
+warehouse_id : str
+The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.
+
+
+
+
+alert : SqlTaskAlert | None = None
+If alert, indicates that this job must refresh a SQL alert.
+
+
+
+
+dashboard : SqlTaskDashboard | None = None
+If dashboard, indicates that this job must refresh a SQL dashboard.
+
+
+
+
+file : SqlTaskFile | None = None
+If file, indicates that this job runs a SQL file in a remote Git repository.
+
+
+
+
+parameters : dict [ str , str ]
+Parameters to be used for each run of this job. The SQL alert task does not support custom parameters.
+
+
+
+
+query : SqlTaskQuery | None = None
+If query, indicates that this job must execute a SQL query.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
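+
+A sketch of a SQL query task on a serverless or pro warehouse (the identifiers are placeholders):
+
+from databricks.bundles.jobs import SqlTask, SqlTaskQuery
+
+sql_task = SqlTask(
+    warehouse_id="<warehouse-id>",
+    query=SqlTaskQuery(query_id="<query-id>"),
+    parameters={"run_date": "2024-01-01"},
+)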
+
+
+
+
+
+
+class SqlTaskAlert
+
+
+alert_id : str
+The canonical identifier of the SQL alert.
+
+
+
+
+subscriptions : list [ SqlTaskSubscription ]
+If specified, alert notifications are sent to subscribers.
+
+
+
+
+pause_subscriptions : bool | None = None
+If true, the alert notifications are not sent to subscribers.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SqlTaskDashboard
+
+
+dashboard_id : str
+The canonical identifier of the SQL dashboard.
+
+
+
+
+custom_subject : str | None = None
+Subject of the email sent to subscribers of this task.
+
+
+
+
+pause_subscriptions : bool | None = None
+If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.
+
+
+
+
+subscriptions : list [ SqlTaskSubscription ]
+If specified, dashboard snapshots are sent to subscriptions.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SqlTaskFile
+
+
+path : str
+Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.
+
+
+
+
+source : Source | None = None
+Optional location type of the SQL file. When set to WORKSPACE , the SQL file will be retrieved
+from the local Databricks workspace. When set to GIT , the SQL file will be retrieved from a Git repository
+defined in git_source . If the value is empty, the task will use GIT if git_source is defined and WORKSPACE otherwise.
+
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SqlTaskQuery
+
+
+query_id : str
+The canonical identifier of the SQL query.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class SqlTaskSubscription
+
+
+destination_id : str | None = None
+The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.
+
+
+
+
+user_name : str | None = None
+The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. You cannot set both destination_id and user_name for subscription notifications.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class TableUpdateTriggerConfiguration
+
+
+condition : Condition | None = None
+The table(s) condition based on which to trigger a job run.
+
+
+
+
+min_time_between_triggers_seconds : int | None = None
+If set, the trigger starts a run only after the specified amount of time has passed since
+the last time the trigger fired. The minimum allowed value is 60 seconds.
+
+
+
+
+table_names : list [ str ]
+A list of Delta tables to monitor for changes. The table name must be in the format catalog_name.schema_name.table_name .
+
+
+
+
+wait_after_last_change_seconds : int | None = None
+If set, the trigger starts a run only after no table updates have occurred for the specified time
+and can be used to wait for a series of table updates before triggering a run. The
+minimum allowed value is 60 seconds.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class Task
+
+
+task_key : str
+A unique name for the task. This field is used to refer to this task from other tasks.
+This field is required and must be unique within its parent job.
+On Update or Reset, this field is used to reference the tasks to be updated or reset.
+
+
+
+
+condition_task : ConditionTask | None = None
+The task evaluates a condition that can be used to control the execution of other tasks when the condition_task field is present.
+The condition task does not require a cluster to execute and does not support retries or notifications.
+
+
+
+
+dbt_task : DbtTask | None = None
+The task runs one or more dbt commands when the dbt_task field is present. The dbt task requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.
+
+
+
+
+depends_on : list [ TaskDependency ]
+An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the run_if condition is true.
+The key is task_key , and the value is the name assigned to the dependent task.
+
+
+
+
+description : str | None = None
+An optional description for this task.
+
+
+
+
+disable_auto_optimization : bool | None = None
+An option to disable auto optimization in serverless
+
+
+
+
+email_notifications : TaskEmailNotifications | None = None
+An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.
+
+
+
+
+environment_key : str | None = None
+The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.
+
+
+
+
+existing_cluster_id : str | None = None
+If existing_cluster_id, the ID of an existing cluster that is used for all runs.
+When running jobs or tasks on an existing cluster, you may need to manually restart
+the cluster if it stops responding. We suggest running jobs and tasks on new clusters for
+greater reliability.
+
+
+
+
+for_each_task : ForEachTask | None = None
+The task executes a nested task for every input provided when the for_each_task field is present.
+
+
+
+
+health : JobsHealthRules | None = None
+
+
+
+
+job_cluster_key : str | None = None
+If job_cluster_key, this task is executed reusing the cluster specified in job.settings.job_clusters .
+
+
+
+
+libraries : list [ Library ]
+An optional list of libraries to be installed on the cluster.
+The default value is an empty list.
+
+
+
+
+max_retries : int | None = None
+An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the FAILED result_state or INTERNAL_ERROR life_cycle_state . The value -1 means to retry indefinitely and the value 0 means to never retry.
+
+
+
+
+min_retry_interval_millis : int | None = None
+An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. The default behavior is that unsuccessful runs are immediately retried.
+
+
+
+
+new_cluster : ClusterSpec | None = None
+If new_cluster, a description of a new cluster that is created for each run.
+
+
+
+
+notebook_task : NotebookTask | None = None
+The task runs a notebook when the notebook_task field is present.
+
+
+
+
+notification_settings : TaskNotificationSettings | None = None
+Optional notification settings that are used when sending notifications to each of the email_notifications and webhook_notifications for this task.
+
+
+
+
+pipeline_task : PipelineTask | None = None
+The task triggers a pipeline update when the pipeline_task field is present. Only pipelines configured to use triggered mode are supported.
+
+
+
+
+python_wheel_task : PythonWheelTask | None = None
+The task runs a Python wheel when the python_wheel_task field is present.
+
+
+
+
+retry_on_timeout : bool | None = None
+An optional policy to specify whether to retry a job when it times out. The default behavior
+is to not retry on timeout.
+
+
+
+
+run_if : RunIf | None = None
+An optional value specifying the condition determining whether the task is run once its dependencies have been completed.
+
+ALL_SUCCESS : All dependencies have executed and succeeded
+AT_LEAST_ONE_SUCCESS : At least one dependency has succeeded
+NONE_FAILED : None of the dependencies have failed and at least one was executed
+ALL_DONE : All dependencies have been completed
+AT_LEAST_ONE_FAILED : At least one dependency failed
+ALL_FAILED : All dependencies have failed
+
+
+
+
+
+run_job_task : RunJobTask | None = None
+The task triggers another job when the run_job_task field is present.
+
+
+
+
+spark_jar_task : SparkJarTask | None = None
+The task runs a JAR when the spark_jar_task field is present.
+
+
+
+
+spark_python_task : SparkPythonTask | None = None
+The task runs a Python file when the spark_python_task field is present.
+
+
+
+
+spark_submit_task : SparkSubmitTask | None = None
+(Legacy) The task runs the spark-submit script when the spark_submit_task field is present. This task can run only on new clusters and is not compatible with serverless compute.
+In the new_cluster specification, libraries and spark_conf are not supported. Instead, use --jars and --py-files to add Java and Python libraries and --conf to set the Spark configurations.
+master , deploy-mode , and executor-cores are automatically configured by Databricks; you cannot specify them in parameters.
+By default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set --driver-memory and --executor-memory to a smaller value to leave some room for off-heap usage.
+The --jars , --py-files , and --files arguments support DBFS and S3 paths.
+
+
+
+
+sql_task : SqlTask | None = None
+The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when the sql_task field is present.
+
+
+
+
+timeout_seconds : int | None = None
+An optional timeout applied to each run of this job task. A value of 0 means no timeout.
+
+
+
+
+webhook_notifications : WebhookNotifications | None = None
+A collection of system notification IDs to notify when runs of this task begin or complete. The default behavior is to not send any system notifications.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
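+
+A sketch of two dependent tasks sharing a job cluster, using the fields documented above (keyword-argument construction assumed; keys and paths are illustrative):
+
+from databricks.bundles.jobs import NotebookTask, RunIf, Task, TaskDependency
+
+ingest = Task(
+    task_key="ingest",
+    job_cluster_key="shared",
+    notebook_task=NotebookTask(notebook_path="/Workspace/etl/ingest"),
+)
+
+# Runs only after "ingest" succeeds.
+report = Task(
+    task_key="report",
+    job_cluster_key="shared",
+    depends_on=[TaskDependency(task_key="ingest")],
+    run_if=RunIf.ALL_SUCCESS,
+    notebook_task=NotebookTask(notebook_path="/Workspace/etl/report"),
+)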
+
+
+
+
+
+
+class TaskDependency
+
+
+task_key : str
+The name of the task this task depends on.
+
+
+
+
+outcome : str | None = None
+Can only be specified on condition task dependencies. The outcome of the dependent task that must be met for this task to run.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class TaskEmailNotifications
+
+
+no_alert_for_skipped_runs : bool | None = None
+If true, do not send email to recipients specified in on_failure if the run is skipped.
+This field is deprecated . Please use the notification_settings.no_alert_for_skipped_runs field.
+
+
+
+
+on_duration_warning_threshold_exceeded : list [ str ]
+A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the RUN_DURATION_SECONDS metric in the health field. If no rule for the RUN_DURATION_SECONDS metric is specified in the health field for the job, notifications are not sent.
+
+
+
+
+on_failure : list [ str ]
+A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an INTERNAL_ERROR life_cycle_state or a FAILED , or TIMED_OUT result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.
+
+
+
+
+on_start : list [ str ]
+A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.
+
+
+
+
+on_streaming_backlog_exceeded : list [ str ]
+A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.
+Streaming backlog thresholds can be set in the health field using the following metrics: STREAMING_BACKLOG_BYTES , STREAMING_BACKLOG_RECORDS , STREAMING_BACKLOG_SECONDS , or STREAMING_BACKLOG_FILES .
+Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.
+
+
+
+
+on_success : list [ str ]
+A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a TERMINATED life_cycle_state and a SUCCESS result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class TaskNotificationSettings
+
+
+alert_on_last_attempt : bool | None = None
+If true, do not send notifications to recipients specified in on_start for the retried runs and do not send notifications to recipients specified in on_failure until the last retry of the run.
+
+
+
+
+no_alert_for_canceled_runs : bool | None = None
+If true, do not send notifications to recipients specified in on_failure if the run is canceled.
+
+
+
+
+no_alert_for_skipped_runs : bool | None = None
+If true, do not send notifications to recipients specified in on_failure if the run is skipped.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class TriggerSettings
+
+
+file_arrival : FileArrivalTriggerConfiguration | None = None
+File arrival trigger settings.
+
+
+
+
+pause_status : PauseStatus | None = None
+Whether this trigger is paused or not.
+
+
+
+
+periodic : PeriodicTriggerConfiguration | None = None
+Periodic trigger settings.
+
+
+
+
+table_update : TableUpdateTriggerConfiguration | None = None
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
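+
+A sketch of a periodic trigger that fires every 12 hours, using the classes documented above:
+
+from databricks.bundles.jobs import (
+    PauseStatus,
+    PeriodicTriggerConfiguration,
+    PeriodicTriggerConfigurationTimeUnit,
+    TriggerSettings,
+)
+
+trigger = TriggerSettings(
+    pause_status=PauseStatus.UNPAUSED,
+    periodic=PeriodicTriggerConfiguration(
+        interval=12,
+        unit=PeriodicTriggerConfigurationTimeUnit.HOURS,
+    ),
+)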
+
+
+
+
+
+
+class VolumesStorageInfo
+
+
+destination : str
+Unity Catalog Volumes file destination, e.g. /Volumes/my-init.sh
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class Webhook
+
+
+id : str
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class WebhookNotifications
+
+
+on_duration_warning_threshold_exceeded : list [ Webhook ]
+An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the RUN_DURATION_SECONDS metric in the health field. A maximum of 3 destinations can be specified for the on_duration_warning_threshold_exceeded property.
+
+
+
+
+on_failure : list [ Webhook ]
+An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the on_failure property.
+
+
+
+
+on_start : list [ Webhook ]
+An optional list of system notification IDs to call when the run starts. A maximum of 3 destinations can be specified for the on_start property.
+
+
+
+
+on_streaming_backlog_exceeded : list [ Webhook ]
+An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.
+Streaming backlog thresholds can be set in the health field using the following metrics: STREAMING_BACKLOG_BYTES , STREAMING_BACKLOG_RECORDS , STREAMING_BACKLOG_SECONDS , or STREAMING_BACKLOG_FILES .
+Alerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.
+A maximum of 3 destinations can be specified for the on_streaming_backlog_exceeded property.
+
+
+
+
+on_success : list [ Webhook ]
+An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the on_success property.
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class WorkloadType
+
+
+clients : ClientsTypes
+defined what type of clients can use the cluster. E.g. Notebooks, Jobs
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+class WorkspaceStorageInfo
+
+
+destination : str
+workspace files destination, e.g. /Users/user1@databricks.com/my-init.sh
+
+
+
+
+classmethod from_dict (
+
+
+value : dict ,
+
+
+) → Self
+
+
+
+
+as_dict ( ) → dict
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/experimental/python/genindex.html b/experimental/python/genindex.html
new file mode 100644
index 0000000000..44059b6227
--- /dev/null
+++ b/experimental/python/genindex.html
@@ -0,0 +1,1592 @@
+
+
+
+
+
+
+ Index — databricks-bundles beta documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index
+
+
+
A
+ |
B
+ |
C
+ |
D
+ |
E
+ |
F
+ |
G
+ |
H
+ |
I
+ |
J
+ |
K
+ |
L
+ |
M
+ |
N
+ |
O
+ |
P
+ |
Q
+ |
R
+ |
S
+ |
T
+ |
U
+ |
V
+ |
W
+ |
Z
+
+
+
A
+
+
+
B
+
+
+
C
+
+
+
D
+
+
+
E
+
+
+
F
+
+
+
G
+
+
+
H
+
+
+
I
+
+
+
J
+
+
+
K
+
+
+
L
+
+
+
M
+
+
+
N
+
+
+
O
+
+
+
P
+
+
+
Q
+
+
+
R
+
+
+
S
+
+
+
T
+
+
+
U
+
+
+
V
+
+
+
W
+
+
+
Z
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/experimental/python/index.html b/experimental/python/index.html
new file mode 100644
index 0000000000..dcbaecf86b
--- /dev/null
+++ b/experimental/python/index.html
@@ -0,0 +1,134 @@
+
+
+
+
+
+
+
+ databricks-bundles (Beta) — databricks-bundles beta documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+databricks-bundles (Beta)
+databricks-bundles package implements Python support for Databricks Asset Bundles.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/experimental/python/objects.inv b/experimental/python/objects.inv
new file mode 100644
index 0000000000..b4c618846a
Binary files /dev/null and b/experimental/python/objects.inv differ
diff --git a/experimental/python/search.html b/experimental/python/search.html
new file mode 100644
index 0000000000..43b73dbd5c
--- /dev/null
+++ b/experimental/python/search.html
@@ -0,0 +1,134 @@
+
+
+
+
+
+
+ Search — databricks-bundles beta documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Search
+
+
+
+
+ Please activate JavaScript to enable the search
+ functionality.
+
+
+
+
+
+
+ Searching for multiple words only shows matches that contain
+ all words.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/experimental/python/searchindex.js b/experimental/python/searchindex.js
new file mode 100644
index 0000000000..2b455d242a
--- /dev/null
+++ b/experimental/python/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({"alltitles": {"Classes": [[0, "classes"], [1, "classes"]], "Core": [[0, null]], "Decorators": [[0, "decorators"]], "Jobs": [[1, null]], "Methods": [[0, "methods"]], "databricks-bundles (Beta)": [[2, null]]}, "docnames": ["databricks.bundles.core", "databricks.bundles.jobs", "index"], "envversion": {"sphinx": 63, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1}, "filenames": ["databricks.bundles.core.rst", "databricks.bundles.jobs.rst", "index.rst"], "indexentries": {"abfss (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.abfss", false]], "add_diagnostic_error() (resources method)": [[0, "databricks.bundles.core.Resources.add_diagnostic_error", false]], "add_diagnostic_warning() (resources method)": [[0, "databricks.bundles.core.Resources.add_diagnostic_warning", false]], "add_diagnostics() (resources method)": [[0, "databricks.bundles.core.Resources.add_diagnostics", false]], "add_job() (resources method)": [[0, "databricks.bundles.core.Resources.add_job", false]], "add_location() (resources method)": [[0, "databricks.bundles.core.Resources.add_location", false]], "add_resource() (resources method)": [[0, "databricks.bundles.core.Resources.add_resource", false]], "add_resources() (resources method)": [[0, "databricks.bundles.core.Resources.add_resources", false]], "adlsgen2info (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Adlsgen2Info", false]], "alert (sqltask attribute)": [[1, "databricks.bundles.jobs.SqlTask.alert", false]], "alert_id (sqltaskalert attribute)": [[1, "databricks.bundles.jobs.SqlTaskAlert.alert_id", false]], "alert_on_last_attempt (tasknotificationsettings attribute)": [[1, "databricks.bundles.jobs.TaskNotificationSettings.alert_on_last_attempt", false]], "all_done (runif attribute)": [[1, "databricks.bundles.jobs.RunIf.ALL_DONE", false]], "all_failed (runif attribute)": [[1, "databricks.bundles.jobs.RunIf.ALL_FAILED", false]], "all_success (runif attribute)": [[1, "databricks.bundles.jobs.RunIf.ALL_SUCCESS", false]], "all_updated (condition attribute)": [[1, "databricks.bundles.jobs.Condition.ALL_UPDATED", false]], "any_updated (condition attribute)": [[1, "databricks.bundles.jobs.Condition.ANY_UPDATED", false]], "apply_policy_default_values (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.apply_policy_default_values", false]], "as_dict() (adlsgen2info method)": [[1, "databricks.bundles.jobs.Adlsgen2Info.as_dict", false]], "as_dict() (autoscale method)": [[1, "databricks.bundles.jobs.AutoScale.as_dict", false]], "as_dict() (awsattributes method)": [[1, "databricks.bundles.jobs.AwsAttributes.as_dict", false]], "as_dict() (azureattributes method)": [[1, "databricks.bundles.jobs.AzureAttributes.as_dict", false]], "as_dict() (clientstypes method)": [[1, "databricks.bundles.jobs.ClientsTypes.as_dict", false]], "as_dict() (clusterlogconf method)": [[1, "databricks.bundles.jobs.ClusterLogConf.as_dict", false]], "as_dict() (clusterspec method)": [[1, "databricks.bundles.jobs.ClusterSpec.as_dict", false]], "as_dict() (conditiontask method)": [[1, "databricks.bundles.jobs.ConditionTask.as_dict", false]], "as_dict() (continuous method)": [[1, "databricks.bundles.jobs.Continuous.as_dict", false]], "as_dict() (cronschedule method)": [[1, 
"databricks.bundles.jobs.CronSchedule.as_dict", false]], "as_dict() (dbfsstorageinfo method)": [[1, "databricks.bundles.jobs.DbfsStorageInfo.as_dict", false]], "as_dict() (dbttask method)": [[1, "databricks.bundles.jobs.DbtTask.as_dict", false]], "as_dict() (diagnostic method)": [[0, "databricks.bundles.core.Diagnostic.as_dict", false]], "as_dict() (dockerbasicauth method)": [[1, "databricks.bundles.jobs.DockerBasicAuth.as_dict", false]], "as_dict() (dockerimage method)": [[1, "databricks.bundles.jobs.DockerImage.as_dict", false]], "as_dict() (environment method)": [[1, "databricks.bundles.jobs.Environment.as_dict", false]], "as_dict() (filearrivaltriggerconfiguration method)": [[1, "databricks.bundles.jobs.FileArrivalTriggerConfiguration.as_dict", false]], "as_dict() (foreachtask method)": [[1, "databricks.bundles.jobs.ForEachTask.as_dict", false]], "as_dict() (gcpattributes method)": [[1, "databricks.bundles.jobs.GcpAttributes.as_dict", false]], "as_dict() (gcsstorageinfo method)": [[1, "databricks.bundles.jobs.GcsStorageInfo.as_dict", false]], "as_dict() (gitsnapshot method)": [[1, "databricks.bundles.jobs.GitSnapshot.as_dict", false]], "as_dict() (gitsource method)": [[1, "databricks.bundles.jobs.GitSource.as_dict", false]], "as_dict() (initscriptinfo method)": [[1, "databricks.bundles.jobs.InitScriptInfo.as_dict", false]], "as_dict() (job method)": [[1, "databricks.bundles.jobs.Job.as_dict", false]], "as_dict() (jobcluster method)": [[1, "databricks.bundles.jobs.JobCluster.as_dict", false]], "as_dict() (jobemailnotifications method)": [[1, "databricks.bundles.jobs.JobEmailNotifications.as_dict", false]], "as_dict() (jobenvironment method)": [[1, "databricks.bundles.jobs.JobEnvironment.as_dict", false]], "as_dict() (jobnotificationsettings method)": [[1, "databricks.bundles.jobs.JobNotificationSettings.as_dict", false]], "as_dict() (jobparameterdefinition method)": [[1, "databricks.bundles.jobs.JobParameterDefinition.as_dict", false]], "as_dict() (jobrunas method)": [[1, "databricks.bundles.jobs.JobRunAs.as_dict", false]], "as_dict() (jobshealthrule method)": [[1, "databricks.bundles.jobs.JobsHealthRule.as_dict", false]], "as_dict() (jobshealthrules method)": [[1, "databricks.bundles.jobs.JobsHealthRules.as_dict", false]], "as_dict() (library method)": [[1, "databricks.bundles.jobs.Library.as_dict", false]], "as_dict() (localfileinfo method)": [[1, "databricks.bundles.jobs.LocalFileInfo.as_dict", false]], "as_dict() (location method)": [[0, "databricks.bundles.core.Location.as_dict", false]], "as_dict() (loganalyticsinfo method)": [[1, "databricks.bundles.jobs.LogAnalyticsInfo.as_dict", false]], "as_dict() (mavenlibrary method)": [[1, "databricks.bundles.jobs.MavenLibrary.as_dict", false]], "as_dict() (notebooktask method)": [[1, "databricks.bundles.jobs.NotebookTask.as_dict", false]], "as_dict() (periodictriggerconfiguration method)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfiguration.as_dict", false]], "as_dict() (permission method)": [[1, "databricks.bundles.jobs.Permission.as_dict", false]], "as_dict() (pipelinetask method)": [[1, "databricks.bundles.jobs.PipelineTask.as_dict", false]], "as_dict() (pythonpypilibrary method)": [[1, "databricks.bundles.jobs.PythonPyPiLibrary.as_dict", false]], "as_dict() (pythonwheeltask method)": [[1, "databricks.bundles.jobs.PythonWheelTask.as_dict", false]], "as_dict() (queuesettings method)": [[1, "databricks.bundles.jobs.QueueSettings.as_dict", false]], "as_dict() (rcranlibrary method)": [[1, 
"databricks.bundles.jobs.RCranLibrary.as_dict", false]], "as_dict() (runjobtask method)": [[1, "databricks.bundles.jobs.RunJobTask.as_dict", false]], "as_dict() (s3storageinfo method)": [[1, "databricks.bundles.jobs.S3StorageInfo.as_dict", false]], "as_dict() (sparkjartask method)": [[1, "databricks.bundles.jobs.SparkJarTask.as_dict", false]], "as_dict() (sparkpythontask method)": [[1, "databricks.bundles.jobs.SparkPythonTask.as_dict", false]], "as_dict() (sparksubmittask method)": [[1, "databricks.bundles.jobs.SparkSubmitTask.as_dict", false]], "as_dict() (sqltask method)": [[1, "databricks.bundles.jobs.SqlTask.as_dict", false]], "as_dict() (sqltaskalert method)": [[1, "databricks.bundles.jobs.SqlTaskAlert.as_dict", false]], "as_dict() (sqltaskdashboard method)": [[1, "databricks.bundles.jobs.SqlTaskDashboard.as_dict", false]], "as_dict() (sqltaskfile method)": [[1, "databricks.bundles.jobs.SqlTaskFile.as_dict", false]], "as_dict() (sqltaskquery method)": [[1, "databricks.bundles.jobs.SqlTaskQuery.as_dict", false]], "as_dict() (sqltasksubscription method)": [[1, "databricks.bundles.jobs.SqlTaskSubscription.as_dict", false]], "as_dict() (tableupdatetriggerconfiguration method)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration.as_dict", false]], "as_dict() (task method)": [[1, "databricks.bundles.jobs.Task.as_dict", false]], "as_dict() (taskdependency method)": [[1, "databricks.bundles.jobs.TaskDependency.as_dict", false]], "as_dict() (taskemailnotifications method)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.as_dict", false]], "as_dict() (tasknotificationsettings method)": [[1, "databricks.bundles.jobs.TaskNotificationSettings.as_dict", false]], "as_dict() (triggersettings method)": [[1, "databricks.bundles.jobs.TriggerSettings.as_dict", false]], "as_dict() (volumesstorageinfo method)": [[1, "databricks.bundles.jobs.VolumesStorageInfo.as_dict", false]], "as_dict() (webhook method)": [[1, "databricks.bundles.jobs.Webhook.as_dict", false]], "as_dict() (webhooknotifications method)": [[1, "databricks.bundles.jobs.WebhookNotifications.as_dict", false]], "as_dict() (workloadtype method)": [[1, "databricks.bundles.jobs.WorkloadType.as_dict", false]], "as_dict() (workspacestorageinfo method)": [[1, "databricks.bundles.jobs.WorkspaceStorageInfo.as_dict", false]], "at_least_one_failed (runif attribute)": [[1, "databricks.bundles.jobs.RunIf.AT_LEAST_ONE_FAILED", false]], "at_least_one_success (runif attribute)": [[1, "databricks.bundles.jobs.RunIf.AT_LEAST_ONE_SUCCESS", false]], "autoscale (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.AutoScale", false]], "autoscale (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.autoscale", false]], "autotermination_minutes (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.autotermination_minutes", false]], "availability (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.availability", false]], "availability (azureattributes attribute)": [[1, "databricks.bundles.jobs.AzureAttributes.availability", false]], "availability (gcpattributes attribute)": [[1, "databricks.bundles.jobs.GcpAttributes.availability", false]], "aws_attributes (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.aws_attributes", false]], "aws_code_commit (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.AWS_CODE_COMMIT", false]], "awsattributes (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.AwsAttributes", false]], "awsavailability (class 
in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.AwsAvailability", false]], "azure_attributes (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.azure_attributes", false]], "azure_dev_ops_services (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.AZURE_DEV_OPS_SERVICES", false]], "azureattributes (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.AzureAttributes", false]], "azureavailability (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.AzureAvailability", false]], "base_parameters (notebooktask attribute)": [[1, "databricks.bundles.jobs.NotebookTask.base_parameters", false]], "basic_auth (dockerimage attribute)": [[1, "databricks.bundles.jobs.DockerImage.basic_auth", false]], "bitbucket_cloud (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.BITBUCKET_CLOUD", false]], "bitbucket_server (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.BITBUCKET_SERVER", false]], "boot_disk_size (gcpattributes attribute)": [[1, "databricks.bundles.jobs.GcpAttributes.boot_disk_size", false]], "budget_policy_id (job attribute)": [[1, "databricks.bundles.jobs.Job.budget_policy_id", false]], "bundle (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Bundle", false]], "canned_acl (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.canned_acl", false]], "catalog (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.catalog", false]], "client (environment attribute)": [[1, "databricks.bundles.jobs.Environment.client", false]], "clients (workloadtype attribute)": [[1, "databricks.bundles.jobs.WorkloadType.clients", false]], "clientstypes (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.ClientsTypes", false]], "cluster_log_conf (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.cluster_log_conf", false]], "cluster_name (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.cluster_name", false]], "clusterlogconf (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.ClusterLogConf", false]], "clusterspec (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.ClusterSpec", false]], "column (location attribute)": [[0, "databricks.bundles.core.Location.column", false]], "commands (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.commands", false]], "concurrency (foreachtask attribute)": [[1, "databricks.bundles.jobs.ForEachTask.concurrency", false]], "condition (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Condition", false]], "condition (tableupdatetriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration.condition", false]], "condition_task (task attribute)": [[1, "databricks.bundles.jobs.Task.condition_task", false]], "conditiontask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.ConditionTask", false]], "conditiontaskop (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.ConditionTaskOp", false]], "continuous (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Continuous", false]], "continuous (job attribute)": [[1, "databricks.bundles.jobs.Job.continuous", false]], "coordinates (mavenlibrary attribute)": [[1, "databricks.bundles.jobs.MavenLibrary.coordinates", false]], "cran (library attribute)": [[1, "databricks.bundles.jobs.Library.cran", false]], "create_error() (diagnostics class method)": [[0, "databricks.bundles.core.Diagnostics.create_error", false]], 
"create_warning() (diagnostics class method)": [[0, "databricks.bundles.core.Diagnostics.create_warning", false]], "cronschedule (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.CronSchedule", false]], "custom_subject (sqltaskdashboard attribute)": [[1, "databricks.bundles.jobs.SqlTaskDashboard.custom_subject", false]], "custom_tags (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.custom_tags", false]], "dashboard (sqltask attribute)": [[1, "databricks.bundles.jobs.SqlTask.dashboard", false]], "dashboard_id (sqltaskdashboard attribute)": [[1, "databricks.bundles.jobs.SqlTaskDashboard.dashboard_id", false]], "data_security_mode (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.data_security_mode", false]], "data_security_mode_auto (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.DATA_SECURITY_MODE_AUTO", false]], "data_security_mode_dedicated (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.DATA_SECURITY_MODE_DEDICATED", false]], "data_security_mode_standard (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.DATA_SECURITY_MODE_STANDARD", false]], "datasecuritymode (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.DataSecurityMode", false]], "days (periodictriggerconfigurationtimeunit attribute)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfigurationTimeUnit.DAYS", false]], "dbfs (clusterlogconf attribute)": [[1, "databricks.bundles.jobs.ClusterLogConf.dbfs", false]], "dbfs (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.dbfs", false]], "dbfsstorageinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.DbfsStorageInfo", false]], "dbt_task (task attribute)": [[1, "databricks.bundles.jobs.Task.dbt_task", false]], "dbttask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.DbtTask", false]], "default (jobparameterdefinition attribute)": [[1, "databricks.bundles.jobs.JobParameterDefinition.default", false]], "dependencies (environment attribute)": [[1, "databricks.bundles.jobs.Environment.dependencies", false]], "depends_on (task attribute)": [[1, "databricks.bundles.jobs.Task.depends_on", false]], "description (job attribute)": [[1, "databricks.bundles.jobs.Job.description", false]], "description (task attribute)": [[1, "databricks.bundles.jobs.Task.description", false]], "destination (adlsgen2info attribute)": [[1, "databricks.bundles.jobs.Adlsgen2Info.destination", false]], "destination (dbfsstorageinfo attribute)": [[1, "databricks.bundles.jobs.DbfsStorageInfo.destination", false]], "destination (gcsstorageinfo attribute)": [[1, "databricks.bundles.jobs.GcsStorageInfo.destination", false]], "destination (localfileinfo attribute)": [[1, "databricks.bundles.jobs.LocalFileInfo.destination", false]], "destination (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.destination", false]], "destination (volumesstorageinfo attribute)": [[1, "databricks.bundles.jobs.VolumesStorageInfo.destination", false]], "destination (workspacestorageinfo attribute)": [[1, "databricks.bundles.jobs.WorkspaceStorageInfo.destination", false]], "destination_id (sqltasksubscription attribute)": [[1, "databricks.bundles.jobs.SqlTaskSubscription.destination_id", false]], "detail (diagnostic attribute)": [[0, "databricks.bundles.core.Diagnostic.detail", false]], "diagnostic (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Diagnostic", false]], "diagnostics (class in 
databricks.bundles.core)": [[0, "databricks.bundles.core.Diagnostics", false]], "diagnostics (resources property)": [[0, "databricks.bundles.core.Resources.diagnostics", false]], "disable_auto_optimization (task attribute)": [[1, "databricks.bundles.jobs.Task.disable_auto_optimization", false]], "docker_image (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.docker_image", false]], "dockerbasicauth (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.DockerBasicAuth", false]], "dockerimage (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.DockerImage", false]], "driver_instance_pool_id (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.driver_instance_pool_id", false]], "driver_node_type_id (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.driver_node_type_id", false]], "ebs_volume_count (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.ebs_volume_count", false]], "ebs_volume_iops (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.ebs_volume_iops", false]], "ebs_volume_size (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.ebs_volume_size", false]], "ebs_volume_throughput (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.ebs_volume_throughput", false]], "ebs_volume_type (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.ebs_volume_type", false]], "ebsvolumetype (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.EbsVolumeType", false]], "egg (library attribute)": [[1, "databricks.bundles.jobs.Library.egg", false]], "email_notifications (job attribute)": [[1, "databricks.bundles.jobs.Job.email_notifications", false]], "email_notifications (task attribute)": [[1, "databricks.bundles.jobs.Task.email_notifications", false]], "enable_elastic_disk (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.enable_elastic_disk", false]], "enable_encryption (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.enable_encryption", false]], "enable_local_disk_encryption (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.enable_local_disk_encryption", false]], "enabled (queuesettings attribute)": [[1, "databricks.bundles.jobs.QueueSettings.enabled", false]], "encryption_type (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.encryption_type", false]], "endpoint (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.endpoint", false]], "entry_point (pythonwheeltask attribute)": [[1, "databricks.bundles.jobs.PythonWheelTask.entry_point", false]], "environment (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Environment", false]], "environment_key (jobenvironment attribute)": [[1, "databricks.bundles.jobs.JobEnvironment.environment_key", false]], "environment_key (task attribute)": [[1, "databricks.bundles.jobs.Task.environment_key", false]], "environments (job attribute)": [[1, "databricks.bundles.jobs.Job.environments", false]], "equal_to (conditiontaskop attribute)": [[1, "databricks.bundles.jobs.ConditionTaskOp.EQUAL_TO", false]], "error (severity attribute)": [[0, "databricks.bundles.core.Severity.ERROR", false]], "exclusions (mavenlibrary attribute)": [[1, "databricks.bundles.jobs.MavenLibrary.exclusions", false]], "existing_cluster_id (task attribute)": [[1, "databricks.bundles.jobs.Task.existing_cluster_id", false]], "extend() (diagnostics method)": [[0, 
"databricks.bundles.core.Diagnostics.extend", false]], "extend_tuple() (diagnostics method)": [[0, "databricks.bundles.core.Diagnostics.extend_tuple", false]], "file (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.file", false]], "file (location attribute)": [[0, "databricks.bundles.core.Location.file", false]], "file (sqltask attribute)": [[1, "databricks.bundles.jobs.SqlTask.file", false]], "file_arrival (triggersettings attribute)": [[1, "databricks.bundles.jobs.TriggerSettings.file_arrival", false]], "filearrivaltriggerconfiguration (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.FileArrivalTriggerConfiguration", false]], "first_on_demand (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.first_on_demand", false]], "first_on_demand (azureattributes attribute)": [[1, "databricks.bundles.jobs.AzureAttributes.first_on_demand", false]], "for_each_task (task attribute)": [[1, "databricks.bundles.jobs.Task.for_each_task", false]], "foreachtask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.ForEachTask", false]], "from_callable() (location static method)": [[0, "databricks.bundles.core.Location.from_callable", false]], "from_dict() (adlsgen2info class method)": [[1, "databricks.bundles.jobs.Adlsgen2Info.from_dict", false]], "from_dict() (autoscale class method)": [[1, "databricks.bundles.jobs.AutoScale.from_dict", false]], "from_dict() (awsattributes class method)": [[1, "databricks.bundles.jobs.AwsAttributes.from_dict", false]], "from_dict() (azureattributes class method)": [[1, "databricks.bundles.jobs.AzureAttributes.from_dict", false]], "from_dict() (clientstypes class method)": [[1, "databricks.bundles.jobs.ClientsTypes.from_dict", false]], "from_dict() (clusterlogconf class method)": [[1, "databricks.bundles.jobs.ClusterLogConf.from_dict", false]], "from_dict() (clusterspec class method)": [[1, "databricks.bundles.jobs.ClusterSpec.from_dict", false]], "from_dict() (conditiontask class method)": [[1, "databricks.bundles.jobs.ConditionTask.from_dict", false]], "from_dict() (continuous class method)": [[1, "databricks.bundles.jobs.Continuous.from_dict", false]], "from_dict() (cronschedule class method)": [[1, "databricks.bundles.jobs.CronSchedule.from_dict", false]], "from_dict() (dbfsstorageinfo class method)": [[1, "databricks.bundles.jobs.DbfsStorageInfo.from_dict", false]], "from_dict() (dbttask class method)": [[1, "databricks.bundles.jobs.DbtTask.from_dict", false]], "from_dict() (dockerbasicauth class method)": [[1, "databricks.bundles.jobs.DockerBasicAuth.from_dict", false]], "from_dict() (dockerimage class method)": [[1, "databricks.bundles.jobs.DockerImage.from_dict", false]], "from_dict() (environment class method)": [[1, "databricks.bundles.jobs.Environment.from_dict", false]], "from_dict() (filearrivaltriggerconfiguration class method)": [[1, "databricks.bundles.jobs.FileArrivalTriggerConfiguration.from_dict", false]], "from_dict() (foreachtask class method)": [[1, "databricks.bundles.jobs.ForEachTask.from_dict", false]], "from_dict() (gcpattributes class method)": [[1, "databricks.bundles.jobs.GcpAttributes.from_dict", false]], "from_dict() (gcsstorageinfo class method)": [[1, "databricks.bundles.jobs.GcsStorageInfo.from_dict", false]], "from_dict() (gitsnapshot class method)": [[1, "databricks.bundles.jobs.GitSnapshot.from_dict", false]], "from_dict() (gitsource class method)": [[1, "databricks.bundles.jobs.GitSource.from_dict", false]], "from_dict() (initscriptinfo class method)": [[1, 
"databricks.bundles.jobs.InitScriptInfo.from_dict", false]], "from_dict() (job class method)": [[1, "databricks.bundles.jobs.Job.from_dict", false]], "from_dict() (jobcluster class method)": [[1, "databricks.bundles.jobs.JobCluster.from_dict", false]], "from_dict() (jobemailnotifications class method)": [[1, "databricks.bundles.jobs.JobEmailNotifications.from_dict", false]], "from_dict() (jobenvironment class method)": [[1, "databricks.bundles.jobs.JobEnvironment.from_dict", false]], "from_dict() (jobnotificationsettings class method)": [[1, "databricks.bundles.jobs.JobNotificationSettings.from_dict", false]], "from_dict() (jobparameterdefinition class method)": [[1, "databricks.bundles.jobs.JobParameterDefinition.from_dict", false]], "from_dict() (jobrunas class method)": [[1, "databricks.bundles.jobs.JobRunAs.from_dict", false]], "from_dict() (jobshealthrule class method)": [[1, "databricks.bundles.jobs.JobsHealthRule.from_dict", false]], "from_dict() (jobshealthrules class method)": [[1, "databricks.bundles.jobs.JobsHealthRules.from_dict", false]], "from_dict() (library class method)": [[1, "databricks.bundles.jobs.Library.from_dict", false]], "from_dict() (localfileinfo class method)": [[1, "databricks.bundles.jobs.LocalFileInfo.from_dict", false]], "from_dict() (loganalyticsinfo class method)": [[1, "databricks.bundles.jobs.LogAnalyticsInfo.from_dict", false]], "from_dict() (mavenlibrary class method)": [[1, "databricks.bundles.jobs.MavenLibrary.from_dict", false]], "from_dict() (notebooktask class method)": [[1, "databricks.bundles.jobs.NotebookTask.from_dict", false]], "from_dict() (periodictriggerconfiguration class method)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfiguration.from_dict", false]], "from_dict() (permission class method)": [[1, "databricks.bundles.jobs.Permission.from_dict", false]], "from_dict() (pipelinetask class method)": [[1, "databricks.bundles.jobs.PipelineTask.from_dict", false]], "from_dict() (pythonpypilibrary class method)": [[1, "databricks.bundles.jobs.PythonPyPiLibrary.from_dict", false]], "from_dict() (pythonwheeltask class method)": [[1, "databricks.bundles.jobs.PythonWheelTask.from_dict", false]], "from_dict() (queuesettings class method)": [[1, "databricks.bundles.jobs.QueueSettings.from_dict", false]], "from_dict() (rcranlibrary class method)": [[1, "databricks.bundles.jobs.RCranLibrary.from_dict", false]], "from_dict() (runjobtask class method)": [[1, "databricks.bundles.jobs.RunJobTask.from_dict", false]], "from_dict() (s3storageinfo class method)": [[1, "databricks.bundles.jobs.S3StorageInfo.from_dict", false]], "from_dict() (sparkjartask class method)": [[1, "databricks.bundles.jobs.SparkJarTask.from_dict", false]], "from_dict() (sparkpythontask class method)": [[1, "databricks.bundles.jobs.SparkPythonTask.from_dict", false]], "from_dict() (sparksubmittask class method)": [[1, "databricks.bundles.jobs.SparkSubmitTask.from_dict", false]], "from_dict() (sqltask class method)": [[1, "databricks.bundles.jobs.SqlTask.from_dict", false]], "from_dict() (sqltaskalert class method)": [[1, "databricks.bundles.jobs.SqlTaskAlert.from_dict", false]], "from_dict() (sqltaskdashboard class method)": [[1, "databricks.bundles.jobs.SqlTaskDashboard.from_dict", false]], "from_dict() (sqltaskfile class method)": [[1, "databricks.bundles.jobs.SqlTaskFile.from_dict", false]], "from_dict() (sqltaskquery class method)": [[1, "databricks.bundles.jobs.SqlTaskQuery.from_dict", false]], "from_dict() (sqltasksubscription class method)": [[1, 
"databricks.bundles.jobs.SqlTaskSubscription.from_dict", false]], "from_dict() (tableupdatetriggerconfiguration class method)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration.from_dict", false]], "from_dict() (task class method)": [[1, "databricks.bundles.jobs.Task.from_dict", false]], "from_dict() (taskdependency class method)": [[1, "databricks.bundles.jobs.TaskDependency.from_dict", false]], "from_dict() (taskemailnotifications class method)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.from_dict", false]], "from_dict() (tasknotificationsettings class method)": [[1, "databricks.bundles.jobs.TaskNotificationSettings.from_dict", false]], "from_dict() (triggersettings class method)": [[1, "databricks.bundles.jobs.TriggerSettings.from_dict", false]], "from_dict() (volumesstorageinfo class method)": [[1, "databricks.bundles.jobs.VolumesStorageInfo.from_dict", false]], "from_dict() (webhook class method)": [[1, "databricks.bundles.jobs.Webhook.from_dict", false]], "from_dict() (webhooknotifications class method)": [[1, "databricks.bundles.jobs.WebhookNotifications.from_dict", false]], "from_dict() (workloadtype class method)": [[1, "databricks.bundles.jobs.WorkloadType.from_dict", false]], "from_dict() (workspacestorageinfo class method)": [[1, "databricks.bundles.jobs.WorkspaceStorageInfo.from_dict", false]], "from_exception() (diagnostics class method)": [[0, "databricks.bundles.core.Diagnostics.from_exception", false]], "from_stack_frame() (location static method)": [[0, "databricks.bundles.core.Location.from_stack_frame", false]], "full_refresh (pipelinetask attribute)": [[1, "databricks.bundles.jobs.PipelineTask.full_refresh", false]], "function (resourcemutator attribute)": [[0, "databricks.bundles.core.ResourceMutator.function", false]], "gcp_attributes (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.gcp_attributes", false]], "gcpattributes (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.GcpAttributes", false]], "gcpavailability (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.GcpAvailability", false]], "gcs (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.gcs", false]], "gcsstorageinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.GcsStorageInfo", false]], "general_purpose_ssd (ebsvolumetype attribute)": [[1, "databricks.bundles.jobs.EbsVolumeType.GENERAL_PURPOSE_SSD", false]], "git (source attribute)": [[1, "databricks.bundles.jobs.Source.GIT", false]], "git_branch (gitsource attribute)": [[1, "databricks.bundles.jobs.GitSource.git_branch", false]], "git_commit (gitsource attribute)": [[1, "databricks.bundles.jobs.GitSource.git_commit", false]], "git_hub (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.GIT_HUB", false]], "git_hub_enterprise (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.GIT_HUB_ENTERPRISE", false]], "git_lab (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.GIT_LAB", false]], "git_lab_enterprise_edition (gitprovider attribute)": [[1, "databricks.bundles.jobs.GitProvider.GIT_LAB_ENTERPRISE_EDITION", false]], "git_provider (gitsource attribute)": [[1, "databricks.bundles.jobs.GitSource.git_provider", false]], "git_source (job attribute)": [[1, "databricks.bundles.jobs.Job.git_source", false]], "git_tag (gitsource attribute)": [[1, "databricks.bundles.jobs.GitSource.git_tag", false]], "git_url (gitsource attribute)": [[1, "databricks.bundles.jobs.GitSource.git_url", false]], "gitprovider 
(class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.GitProvider", false]], "gitsnapshot (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.GitSnapshot", false]], "gitsource (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.GitSource", false]], "google_service_account (gcpattributes attribute)": [[1, "databricks.bundles.jobs.GcpAttributes.google_service_account", false]], "greater_than (conditiontaskop attribute)": [[1, "databricks.bundles.jobs.ConditionTaskOp.GREATER_THAN", false]], "greater_than (jobshealthoperator attribute)": [[1, "databricks.bundles.jobs.JobsHealthOperator.GREATER_THAN", false]], "greater_than_or_equal (conditiontaskop attribute)": [[1, "databricks.bundles.jobs.ConditionTaskOp.GREATER_THAN_OR_EQUAL", false]], "group_name (permission attribute)": [[1, "databricks.bundles.jobs.Permission.group_name", false]], "has_error() (diagnostics method)": [[0, "databricks.bundles.core.Diagnostics.has_error", false]], "health (job attribute)": [[1, "databricks.bundles.jobs.Job.health", false]], "health (task attribute)": [[1, "databricks.bundles.jobs.Task.health", false]], "hours (periodictriggerconfigurationtimeunit attribute)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfigurationTimeUnit.HOURS", false]], "id (webhook attribute)": [[1, "databricks.bundles.jobs.Webhook.id", false]], "init_scripts (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.init_scripts", false]], "initscriptinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.InitScriptInfo", false]], "inputs (foreachtask attribute)": [[1, "databricks.bundles.jobs.ForEachTask.inputs", false]], "instance_pool_id (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.instance_pool_id", false]], "instance_profile_arn (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.instance_profile_arn", false]], "interval (periodictriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfiguration.interval", false]], "is_single_node (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.is_single_node", false]], "items (diagnostics attribute)": [[0, "databricks.bundles.core.Diagnostics.items", false]], "jar (library attribute)": [[1, "databricks.bundles.jobs.Library.jar", false]], "jar_uri (sparkjartask attribute)": [[1, "databricks.bundles.jobs.SparkJarTask.jar_uri", false]], "job (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Job", false]], "job_cluster_key (jobcluster attribute)": [[1, "databricks.bundles.jobs.JobCluster.job_cluster_key", false]], "job_cluster_key (task attribute)": [[1, "databricks.bundles.jobs.Task.job_cluster_key", false]], "job_clusters (job attribute)": [[1, "databricks.bundles.jobs.Job.job_clusters", false]], "job_id (runjobtask attribute)": [[1, "databricks.bundles.jobs.RunJobTask.job_id", false]], "job_mutator() (in module databricks.bundles.core)": [[0, "databricks.bundles.core.job_mutator", false]], "job_parameters (runjobtask attribute)": [[1, "databricks.bundles.jobs.RunJobTask.job_parameters", false]], "jobcluster (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobCluster", false]], "jobemailnotifications (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobEmailNotifications", false]], "jobenvironment (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobEnvironment", false]], "jobnotificationsettings (class in databricks.bundles.jobs)": [[1, 
"databricks.bundles.jobs.JobNotificationSettings", false]], "jobparameterdefinition (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobParameterDefinition", false]], "jobrunas (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobRunAs", false]], "jobs (clientstypes attribute)": [[1, "databricks.bundles.jobs.ClientsTypes.jobs", false]], "jobs (resources property)": [[0, "databricks.bundles.core.Resources.jobs", false]], "jobshealthmetric (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobsHealthMetric", false]], "jobshealthoperator (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobsHealthOperator", false]], "jobshealthrule (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobsHealthRule", false]], "jobshealthrules (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.JobsHealthRules", false]], "kms_key (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.kms_key", false]], "left (conditiontask attribute)": [[1, "databricks.bundles.jobs.ConditionTask.left", false]], "legacy_passthrough (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.LEGACY_PASSTHROUGH", false]], "legacy_single_user (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.LEGACY_SINGLE_USER", false]], "legacy_single_user_standard (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.LEGACY_SINGLE_USER_STANDARD", false]], "legacy_table_acl (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.LEGACY_TABLE_ACL", false]], "less_than (conditiontaskop attribute)": [[1, "databricks.bundles.jobs.ConditionTaskOp.LESS_THAN", false]], "less_than_or_equal (conditiontaskop attribute)": [[1, "databricks.bundles.jobs.ConditionTaskOp.LESS_THAN_OR_EQUAL", false]], "level (permission attribute)": [[1, "databricks.bundles.jobs.Permission.level", false]], "libraries (task attribute)": [[1, "databricks.bundles.jobs.Task.libraries", false]], "library (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Library", false]], "line (location attribute)": [[0, "databricks.bundles.core.Location.line", false]], "load_resources_from_current_package_module() (core method)": [[0, "databricks.bundles.core.load_resources_from_current_package_module", false]], "load_resources_from_module() (core method)": [[0, "databricks.bundles.core.load_resources_from_module", false]], "load_resources_from_modules() (core method)": [[0, "databricks.bundles.core.load_resources_from_modules", false]], "load_resources_from_package_module() (core method)": [[0, "databricks.bundles.core.load_resources_from_package_module", false]], "local_ssd_count (gcpattributes attribute)": [[1, "databricks.bundles.jobs.GcpAttributes.local_ssd_count", false]], "localfileinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.LocalFileInfo", false]], "location (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Location", false]], "location (diagnostic attribute)": [[0, "databricks.bundles.core.Diagnostic.location", false]], "log_analytics_info (azureattributes attribute)": [[1, "databricks.bundles.jobs.AzureAttributes.log_analytics_info", false]], "log_analytics_primary_key (loganalyticsinfo attribute)": [[1, "databricks.bundles.jobs.LogAnalyticsInfo.log_analytics_primary_key", false]], "log_analytics_workspace_id (loganalyticsinfo attribute)": [[1, "databricks.bundles.jobs.LogAnalyticsInfo.log_analytics_workspace_id", false]], 
"loganalyticsinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.LogAnalyticsInfo", false]], "main_class_name (sparkjartask attribute)": [[1, "databricks.bundles.jobs.SparkJarTask.main_class_name", false]], "maven (library attribute)": [[1, "databricks.bundles.jobs.Library.maven", false]], "mavenlibrary (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.MavenLibrary", false]], "max_concurrent_runs (job attribute)": [[1, "databricks.bundles.jobs.Job.max_concurrent_runs", false]], "max_retries (task attribute)": [[1, "databricks.bundles.jobs.Task.max_retries", false]], "max_workers (autoscale attribute)": [[1, "databricks.bundles.jobs.AutoScale.max_workers", false]], "metric (jobshealthrule attribute)": [[1, "databricks.bundles.jobs.JobsHealthRule.metric", false]], "min_retry_interval_millis (task attribute)": [[1, "databricks.bundles.jobs.Task.min_retry_interval_millis", false]], "min_time_between_triggers_seconds (filearrivaltriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.FileArrivalTriggerConfiguration.min_time_between_triggers_seconds", false]], "min_time_between_triggers_seconds (tableupdatetriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration.min_time_between_triggers_seconds", false]], "min_workers (autoscale attribute)": [[1, "databricks.bundles.jobs.AutoScale.min_workers", false]], "name (job attribute)": [[1, "databricks.bundles.jobs.Job.name", false]], "name (jobparameterdefinition attribute)": [[1, "databricks.bundles.jobs.JobParameterDefinition.name", false]], "named_parameters (pythonwheeltask attribute)": [[1, "databricks.bundles.jobs.PythonWheelTask.named_parameters", false]], "new_cluster (jobcluster attribute)": [[1, "databricks.bundles.jobs.JobCluster.new_cluster", false]], "new_cluster (task attribute)": [[1, "databricks.bundles.jobs.Task.new_cluster", false]], "no_alert_for_canceled_runs (jobnotificationsettings attribute)": [[1, "databricks.bundles.jobs.JobNotificationSettings.no_alert_for_canceled_runs", false]], "no_alert_for_canceled_runs (tasknotificationsettings attribute)": [[1, "databricks.bundles.jobs.TaskNotificationSettings.no_alert_for_canceled_runs", false]], "no_alert_for_skipped_runs (jobemailnotifications attribute)": [[1, "databricks.bundles.jobs.JobEmailNotifications.no_alert_for_skipped_runs", false]], "no_alert_for_skipped_runs (jobnotificationsettings attribute)": [[1, "databricks.bundles.jobs.JobNotificationSettings.no_alert_for_skipped_runs", false]], "no_alert_for_skipped_runs (taskemailnotifications attribute)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.no_alert_for_skipped_runs", false]], "no_alert_for_skipped_runs (tasknotificationsettings attribute)": [[1, "databricks.bundles.jobs.TaskNotificationSettings.no_alert_for_skipped_runs", false]], "node_type_id (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.node_type_id", false]], "none (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.NONE", false]], "none_failed (runif attribute)": [[1, "databricks.bundles.jobs.RunIf.NONE_FAILED", false]], "not_equal (conditiontaskop attribute)": [[1, "databricks.bundles.jobs.ConditionTaskOp.NOT_EQUAL", false]], "notebook_path (notebooktask attribute)": [[1, "databricks.bundles.jobs.NotebookTask.notebook_path", false]], "notebook_task (task attribute)": [[1, "databricks.bundles.jobs.Task.notebook_task", false]], "notebooks (clientstypes attribute)": [[1, "databricks.bundles.jobs.ClientsTypes.notebooks", false]], 
"notebooktask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.NotebookTask", false]], "notification_settings (job attribute)": [[1, "databricks.bundles.jobs.Job.notification_settings", false]], "notification_settings (task attribute)": [[1, "databricks.bundles.jobs.Task.notification_settings", false]], "null (runtimeengine attribute)": [[1, "databricks.bundles.jobs.RuntimeEngine.NULL", false]], "num_workers (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.num_workers", false]], "on_demand (awsavailability attribute)": [[1, "databricks.bundles.jobs.AwsAvailability.ON_DEMAND", false]], "on_demand_azure (azureavailability attribute)": [[1, "databricks.bundles.jobs.AzureAvailability.ON_DEMAND_AZURE", false]], "on_demand_gcp (gcpavailability attribute)": [[1, "databricks.bundles.jobs.GcpAvailability.ON_DEMAND_GCP", false]], "on_duration_warning_threshold_exceeded (jobemailnotifications attribute)": [[1, "databricks.bundles.jobs.JobEmailNotifications.on_duration_warning_threshold_exceeded", false]], "on_duration_warning_threshold_exceeded (taskemailnotifications attribute)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.on_duration_warning_threshold_exceeded", false]], "on_duration_warning_threshold_exceeded (webhooknotifications attribute)": [[1, "databricks.bundles.jobs.WebhookNotifications.on_duration_warning_threshold_exceeded", false]], "on_failure (jobemailnotifications attribute)": [[1, "databricks.bundles.jobs.JobEmailNotifications.on_failure", false]], "on_failure (taskemailnotifications attribute)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.on_failure", false]], "on_failure (webhooknotifications attribute)": [[1, "databricks.bundles.jobs.WebhookNotifications.on_failure", false]], "on_start (jobemailnotifications attribute)": [[1, "databricks.bundles.jobs.JobEmailNotifications.on_start", false]], "on_start (taskemailnotifications attribute)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.on_start", false]], "on_start (webhooknotifications attribute)": [[1, "databricks.bundles.jobs.WebhookNotifications.on_start", false]], "on_streaming_backlog_exceeded (jobemailnotifications attribute)": [[1, "databricks.bundles.jobs.JobEmailNotifications.on_streaming_backlog_exceeded", false]], "on_streaming_backlog_exceeded (taskemailnotifications attribute)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.on_streaming_backlog_exceeded", false]], "on_streaming_backlog_exceeded (webhooknotifications attribute)": [[1, "databricks.bundles.jobs.WebhookNotifications.on_streaming_backlog_exceeded", false]], "on_success (jobemailnotifications attribute)": [[1, "databricks.bundles.jobs.JobEmailNotifications.on_success", false]], "on_success (taskemailnotifications attribute)": [[1, "databricks.bundles.jobs.TaskEmailNotifications.on_success", false]], "on_success (webhooknotifications attribute)": [[1, "databricks.bundles.jobs.WebhookNotifications.on_success", false]], "op (conditiontask attribute)": [[1, "databricks.bundles.jobs.ConditionTask.op", false]], "op (jobshealthrule attribute)": [[1, "databricks.bundles.jobs.JobsHealthRule.op", false]], "outcome (taskdependency attribute)": [[1, "databricks.bundles.jobs.TaskDependency.outcome", false]], "package (pythonpypilibrary attribute)": [[1, "databricks.bundles.jobs.PythonPyPiLibrary.package", false]], "package (rcranlibrary attribute)": [[1, "databricks.bundles.jobs.RCranLibrary.package", false]], "package_name (pythonwheeltask attribute)": [[1, 
"databricks.bundles.jobs.PythonWheelTask.package_name", false]], "parameters (job attribute)": [[1, "databricks.bundles.jobs.Job.parameters", false]], "parameters (pythonwheeltask attribute)": [[1, "databricks.bundles.jobs.PythonWheelTask.parameters", false]], "parameters (sparkjartask attribute)": [[1, "databricks.bundles.jobs.SparkJarTask.parameters", false]], "parameters (sparkpythontask attribute)": [[1, "databricks.bundles.jobs.SparkPythonTask.parameters", false]], "parameters (sparksubmittask attribute)": [[1, "databricks.bundles.jobs.SparkSubmitTask.parameters", false]], "parameters (sqltask attribute)": [[1, "databricks.bundles.jobs.SqlTask.parameters", false]], "password (dockerbasicauth attribute)": [[1, "databricks.bundles.jobs.DockerBasicAuth.password", false]], "path (diagnostic attribute)": [[0, "databricks.bundles.core.Diagnostic.path", false]], "path (sqltaskfile attribute)": [[1, "databricks.bundles.jobs.SqlTaskFile.path", false]], "path (variable attribute)": [[0, "databricks.bundles.core.Variable.path", false]], "pause_status (continuous attribute)": [[1, "databricks.bundles.jobs.Continuous.pause_status", false]], "pause_status (cronschedule attribute)": [[1, "databricks.bundles.jobs.CronSchedule.pause_status", false]], "pause_status (triggersettings attribute)": [[1, "databricks.bundles.jobs.TriggerSettings.pause_status", false]], "pause_subscriptions (sqltaskalert attribute)": [[1, "databricks.bundles.jobs.SqlTaskAlert.pause_subscriptions", false]], "pause_subscriptions (sqltaskdashboard attribute)": [[1, "databricks.bundles.jobs.SqlTaskDashboard.pause_subscriptions", false]], "paused (pausestatus attribute)": [[1, "databricks.bundles.jobs.PauseStatus.PAUSED", false]], "pausestatus (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.PauseStatus", false]], "periodic (triggersettings attribute)": [[1, "databricks.bundles.jobs.TriggerSettings.periodic", false]], "periodictriggerconfiguration (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfiguration", false]], "periodictriggerconfigurationtimeunit (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfigurationTimeUnit", false]], "permission (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Permission", false]], "permissions (job attribute)": [[1, "databricks.bundles.jobs.Job.permissions", false]], "photon (runtimeengine attribute)": [[1, "databricks.bundles.jobs.RuntimeEngine.PHOTON", false]], "pipeline_id (pipelinetask attribute)": [[1, "databricks.bundles.jobs.PipelineTask.pipeline_id", false]], "pipeline_task (task attribute)": [[1, "databricks.bundles.jobs.Task.pipeline_task", false]], "pipelinetask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.PipelineTask", false]], "policy_id (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.policy_id", false]], "preemptible_gcp (gcpavailability attribute)": [[1, "databricks.bundles.jobs.GcpAvailability.PREEMPTIBLE_GCP", false]], "preemptible_with_fallback_gcp (gcpavailability attribute)": [[1, "databricks.bundles.jobs.GcpAvailability.PREEMPTIBLE_WITH_FALLBACK_GCP", false]], "profiles_directory (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.profiles_directory", false]], "project_directory (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.project_directory", false]], "pypi (library attribute)": [[1, "databricks.bundles.jobs.Library.pypi", false]], "python_file (sparkpythontask attribute)": [[1, 
"databricks.bundles.jobs.SparkPythonTask.python_file", false]], "python_wheel_task (task attribute)": [[1, "databricks.bundles.jobs.Task.python_wheel_task", false]], "pythonpypilibrary (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.PythonPyPiLibrary", false]], "pythonwheeltask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.PythonWheelTask", false]], "quartz_cron_expression (cronschedule attribute)": [[1, "databricks.bundles.jobs.CronSchedule.quartz_cron_expression", false]], "query (sqltask attribute)": [[1, "databricks.bundles.jobs.SqlTask.query", false]], "query_id (sqltaskquery attribute)": [[1, "databricks.bundles.jobs.SqlTaskQuery.query_id", false]], "queue (job attribute)": [[1, "databricks.bundles.jobs.Job.queue", false]], "queuesettings (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.QueueSettings", false]], "rcranlibrary (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.RCranLibrary", false]], "region (s3storageinfo attribute)": [[1, "databricks.bundles.jobs.S3StorageInfo.region", false]], "repo (mavenlibrary attribute)": [[1, "databricks.bundles.jobs.MavenLibrary.repo", false]], "repo (pythonpypilibrary attribute)": [[1, "databricks.bundles.jobs.PythonPyPiLibrary.repo", false]], "repo (rcranlibrary attribute)": [[1, "databricks.bundles.jobs.RCranLibrary.repo", false]], "requirements (library attribute)": [[1, "databricks.bundles.jobs.Library.requirements", false]], "resolve_variable() (bundle method)": [[0, "databricks.bundles.core.Bundle.resolve_variable", false]], "resolve_variable_list() (bundle method)": [[0, "databricks.bundles.core.Bundle.resolve_variable_list", false]], "resource (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Resource", false]], "resource_type (resourcemutator attribute)": [[0, "databricks.bundles.core.ResourceMutator.resource_type", false]], "resourcemutator (class in databricks.bundles.core)": [[0, "databricks.bundles.core.ResourceMutator", false]], "resources (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Resources", false]], "retry_on_timeout (task attribute)": [[1, "databricks.bundles.jobs.Task.retry_on_timeout", false]], "right (conditiontask attribute)": [[1, "databricks.bundles.jobs.ConditionTask.right", false]], "rules (jobshealthrules attribute)": [[1, "databricks.bundles.jobs.JobsHealthRules.rules", false]], "run_as (job attribute)": [[1, "databricks.bundles.jobs.Job.run_as", false]], "run_duration_seconds (jobshealthmetric attribute)": [[1, "databricks.bundles.jobs.JobsHealthMetric.RUN_DURATION_SECONDS", false]], "run_if (task attribute)": [[1, "databricks.bundles.jobs.Task.run_if", false]], "run_job_task (task attribute)": [[1, "databricks.bundles.jobs.Task.run_job_task", false]], "runif (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.RunIf", false]], "runjobtask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.RunJobTask", false]], "runtime_engine (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.runtime_engine", false]], "runtimeengine (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.RuntimeEngine", false]], "s3 (clusterlogconf attribute)": [[1, "databricks.bundles.jobs.ClusterLogConf.s3", false]], "s3 (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.s3", false]], "s3storageinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.S3StorageInfo", false]], "schedule (job attribute)": [[1, 
"databricks.bundles.jobs.Job.schedule", false]], "schema (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.schema", false]], "service_principal_name (jobrunas attribute)": [[1, "databricks.bundles.jobs.JobRunAs.service_principal_name", false]], "service_principal_name (permission attribute)": [[1, "databricks.bundles.jobs.Permission.service_principal_name", false]], "severity (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Severity", false]], "severity (diagnostic attribute)": [[0, "databricks.bundles.core.Diagnostic.severity", false]], "single_user (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.SINGLE_USER", false]], "single_user_name (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.single_user_name", false]], "source (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Source", false]], "source (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.source", false]], "source (notebooktask attribute)": [[1, "databricks.bundles.jobs.NotebookTask.source", false]], "source (sparkpythontask attribute)": [[1, "databricks.bundles.jobs.SparkPythonTask.source", false]], "source (sqltaskfile attribute)": [[1, "databricks.bundles.jobs.SqlTaskFile.source", false]], "spark_conf (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.spark_conf", false]], "spark_env_vars (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.spark_env_vars", false]], "spark_jar_task (task attribute)": [[1, "databricks.bundles.jobs.Task.spark_jar_task", false]], "spark_python_task (task attribute)": [[1, "databricks.bundles.jobs.Task.spark_python_task", false]], "spark_submit_task (task attribute)": [[1, "databricks.bundles.jobs.Task.spark_submit_task", false]], "spark_version (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.spark_version", false]], "sparkjartask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SparkJarTask", false]], "sparkpythontask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SparkPythonTask", false]], "sparksubmittask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SparkSubmitTask", false]], "spec (jobenvironment attribute)": [[1, "databricks.bundles.jobs.JobEnvironment.spec", false]], "spot (awsavailability attribute)": [[1, "databricks.bundles.jobs.AwsAvailability.SPOT", false]], "spot_azure (azureavailability attribute)": [[1, "databricks.bundles.jobs.AzureAvailability.SPOT_AZURE", false]], "spot_bid_max_price (azureattributes attribute)": [[1, "databricks.bundles.jobs.AzureAttributes.spot_bid_max_price", false]], "spot_bid_price_percent (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.spot_bid_price_percent", false]], "spot_with_fallback (awsavailability attribute)": [[1, "databricks.bundles.jobs.AwsAvailability.SPOT_WITH_FALLBACK", false]], "spot_with_fallback_azure (azureavailability attribute)": [[1, "databricks.bundles.jobs.AzureAvailability.SPOT_WITH_FALLBACK_AZURE", false]], "sql_task (task attribute)": [[1, "databricks.bundles.jobs.Task.sql_task", false]], "sqltask (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SqlTask", false]], "sqltaskalert (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SqlTaskAlert", false]], "sqltaskdashboard (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SqlTaskDashboard", false]], "sqltaskfile (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SqlTaskFile", false]], 
"sqltaskquery (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SqlTaskQuery", false]], "sqltasksubscription (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.SqlTaskSubscription", false]], "ssh_public_keys (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.ssh_public_keys", false]], "standard (runtimeengine attribute)": [[1, "databricks.bundles.jobs.RuntimeEngine.STANDARD", false]], "streaming_backlog_bytes (jobshealthmetric attribute)": [[1, "databricks.bundles.jobs.JobsHealthMetric.STREAMING_BACKLOG_BYTES", false]], "streaming_backlog_files (jobshealthmetric attribute)": [[1, "databricks.bundles.jobs.JobsHealthMetric.STREAMING_BACKLOG_FILES", false]], "streaming_backlog_records (jobshealthmetric attribute)": [[1, "databricks.bundles.jobs.JobsHealthMetric.STREAMING_BACKLOG_RECORDS", false]], "streaming_backlog_seconds (jobshealthmetric attribute)": [[1, "databricks.bundles.jobs.JobsHealthMetric.STREAMING_BACKLOG_SECONDS", false]], "subscriptions (sqltaskalert attribute)": [[1, "databricks.bundles.jobs.SqlTaskAlert.subscriptions", false]], "subscriptions (sqltaskdashboard attribute)": [[1, "databricks.bundles.jobs.SqlTaskDashboard.subscriptions", false]], "summary (diagnostic attribute)": [[0, "databricks.bundles.core.Diagnostic.summary", false]], "t (class in databricks.bundles.core)": [[0, "databricks.bundles.core.T", false]], "table_names (tableupdatetriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration.table_names", false]], "table_update (triggersettings attribute)": [[1, "databricks.bundles.jobs.TriggerSettings.table_update", false]], "tableupdatetriggerconfiguration (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration", false]], "tags (job attribute)": [[1, "databricks.bundles.jobs.Job.tags", false]], "target (bundle attribute)": [[0, "databricks.bundles.core.Bundle.target", false]], "task (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Task", false]], "task (foreachtask attribute)": [[1, "databricks.bundles.jobs.ForEachTask.task", false]], "task_key (task attribute)": [[1, "databricks.bundles.jobs.Task.task_key", false]], "task_key (taskdependency attribute)": [[1, "databricks.bundles.jobs.TaskDependency.task_key", false]], "taskdependency (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.TaskDependency", false]], "taskemailnotifications (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.TaskEmailNotifications", false]], "tasknotificationsettings (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.TaskNotificationSettings", false]], "tasks (job attribute)": [[1, "databricks.bundles.jobs.Job.tasks", false]], "throughput_optimized_hdd (ebsvolumetype attribute)": [[1, "databricks.bundles.jobs.EbsVolumeType.THROUGHPUT_OPTIMIZED_HDD", false]], "timeout_seconds (job attribute)": [[1, "databricks.bundles.jobs.Job.timeout_seconds", false]], "timeout_seconds (task attribute)": [[1, "databricks.bundles.jobs.Task.timeout_seconds", false]], "timezone_id (cronschedule attribute)": [[1, "databricks.bundles.jobs.CronSchedule.timezone_id", false]], "trigger (job attribute)": [[1, "databricks.bundles.jobs.Job.trigger", false]], "triggersettings (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.TriggerSettings", false]], "type (variable attribute)": [[0, "databricks.bundles.core.Variable.type", false]], "unit (periodictriggerconfiguration attribute)": [[1, 
"databricks.bundles.jobs.PeriodicTriggerConfiguration.unit", false]], "unpaused (pausestatus attribute)": [[1, "databricks.bundles.jobs.PauseStatus.UNPAUSED", false]], "url (dockerimage attribute)": [[1, "databricks.bundles.jobs.DockerImage.url", false]], "url (filearrivaltriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.FileArrivalTriggerConfiguration.url", false]], "use_ml_runtime (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.use_ml_runtime", false]], "use_preemptible_executors (gcpattributes attribute)": [[1, "databricks.bundles.jobs.GcpAttributes.use_preemptible_executors", false]], "used_commit (gitsnapshot attribute)": [[1, "databricks.bundles.jobs.GitSnapshot.used_commit", false]], "user_isolation (datasecuritymode attribute)": [[1, "databricks.bundles.jobs.DataSecurityMode.USER_ISOLATION", false]], "user_name (jobrunas attribute)": [[1, "databricks.bundles.jobs.JobRunAs.user_name", false]], "user_name (permission attribute)": [[1, "databricks.bundles.jobs.Permission.user_name", false]], "user_name (sqltasksubscription attribute)": [[1, "databricks.bundles.jobs.SqlTaskSubscription.user_name", false]], "username (dockerbasicauth attribute)": [[1, "databricks.bundles.jobs.DockerBasicAuth.username", false]], "value (jobshealthrule attribute)": [[1, "databricks.bundles.jobs.JobsHealthRule.value", false]], "value (variable property)": [[0, "databricks.bundles.core.Variable.value", false]], "variable (class in databricks.bundles.core)": [[0, "databricks.bundles.core.Variable", false]], "variables (bundle attribute)": [[0, "databricks.bundles.core.Bundle.variables", false]], "variables() (in module databricks.bundles.core)": [[0, "databricks.bundles.core.variables", false]], "volumes (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.volumes", false]], "volumesstorageinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.VolumesStorageInfo", false]], "wait_after_last_change_seconds (filearrivaltriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.FileArrivalTriggerConfiguration.wait_after_last_change_seconds", false]], "wait_after_last_change_seconds (tableupdatetriggerconfiguration attribute)": [[1, "databricks.bundles.jobs.TableUpdateTriggerConfiguration.wait_after_last_change_seconds", false]], "warehouse_id (dbttask attribute)": [[1, "databricks.bundles.jobs.DbtTask.warehouse_id", false]], "warehouse_id (notebooktask attribute)": [[1, "databricks.bundles.jobs.NotebookTask.warehouse_id", false]], "warehouse_id (sqltask attribute)": [[1, "databricks.bundles.jobs.SqlTask.warehouse_id", false]], "warning (severity attribute)": [[0, "databricks.bundles.core.Severity.WARNING", false]], "webhook (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.Webhook", false]], "webhook_notifications (job attribute)": [[1, "databricks.bundles.jobs.Job.webhook_notifications", false]], "webhook_notifications (task attribute)": [[1, "databricks.bundles.jobs.Task.webhook_notifications", false]], "webhooknotifications (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.WebhookNotifications", false]], "weeks (periodictriggerconfigurationtimeunit attribute)": [[1, "databricks.bundles.jobs.PeriodicTriggerConfigurationTimeUnit.WEEKS", false]], "whl (library attribute)": [[1, "databricks.bundles.jobs.Library.whl", false]], "workload_type (clusterspec attribute)": [[1, "databricks.bundles.jobs.ClusterSpec.workload_type", false]], "workloadtype (class in databricks.bundles.jobs)": [[1, 
"databricks.bundles.jobs.WorkloadType", false]], "workspace (initscriptinfo attribute)": [[1, "databricks.bundles.jobs.InitScriptInfo.workspace", false]], "workspace (source attribute)": [[1, "databricks.bundles.jobs.Source.WORKSPACE", false]], "workspacestorageinfo (class in databricks.bundles.jobs)": [[1, "databricks.bundles.jobs.WorkspaceStorageInfo", false]], "zone_id (awsattributes attribute)": [[1, "databricks.bundles.jobs.AwsAttributes.zone_id", false]], "zone_id (gcpattributes attribute)": [[1, "databricks.bundles.jobs.GcpAttributes.zone_id", false]]}, "objects": {"databricks.bundles.core": [[0, 0, 1, "", "Bundle"], [0, 0, 1, "", "Diagnostic"], [0, 0, 1, "", "Diagnostics"], [0, 0, 1, "", "Location"], [0, 0, 1, "", "Resource"], [0, 0, 1, "", "ResourceMutator"], [0, 0, 1, "", "Resources"], [0, 0, 1, "", "Severity"], [0, 0, 1, "", "T"], [0, 0, 1, "", "Variable"], [0, 4, 1, "", "job_mutator"], [0, 1, 1, "", "load_resources_from_current_package_module"], [0, 1, 1, "", "load_resources_from_module"], [0, 1, 1, "", "load_resources_from_modules"], [0, 1, 1, "", "load_resources_from_package_module"], [0, 4, 1, "", "variables"]], "databricks.bundles.core.Bundle": [[0, 1, 1, "", "resolve_variable"], [0, 1, 1, "", "resolve_variable_list"], [0, 2, 1, "", "target"], [0, 2, 1, "", "variables"]], "databricks.bundles.core.Diagnostic": [[0, 1, 1, "", "as_dict"], [0, 2, 1, "", "detail"], [0, 2, 1, "", "location"], [0, 2, 1, "", "path"], [0, 2, 1, "", "severity"], [0, 2, 1, "", "summary"]], "databricks.bundles.core.Diagnostics": [[0, 1, 1, "", "create_error"], [0, 1, 1, "", "create_warning"], [0, 1, 1, "", "extend"], [0, 1, 1, "", "extend_tuple"], [0, 1, 1, "", "from_exception"], [0, 1, 1, "", "has_error"], [0, 2, 1, "", "items"]], "databricks.bundles.core.Location": [[0, 1, 1, "", "as_dict"], [0, 2, 1, "", "column"], [0, 2, 1, "", "file"], [0, 1, 1, "", "from_callable"], [0, 1, 1, "", "from_stack_frame"], [0, 2, 1, "", "line"]], "databricks.bundles.core.ResourceMutator": [[0, 2, 1, "", "function"], [0, 2, 1, "", "resource_type"]], "databricks.bundles.core.Resources": [[0, 1, 1, "", "add_diagnostic_error"], [0, 1, 1, "", "add_diagnostic_warning"], [0, 1, 1, "", "add_diagnostics"], [0, 1, 1, "", "add_job"], [0, 1, 1, "", "add_location"], [0, 1, 1, "", "add_resource"], [0, 1, 1, "", "add_resources"], [0, 3, 1, "", "diagnostics"], [0, 3, 1, "", "jobs"]], "databricks.bundles.core.Severity": [[0, 2, 1, "", "ERROR"], [0, 2, 1, "", "WARNING"]], "databricks.bundles.core.Variable": [[0, 2, 1, "", "path"], [0, 2, 1, "", "type"], [0, 3, 1, "", "value"]], "databricks.bundles.jobs": [[1, 0, 1, "", "Adlsgen2Info"], [1, 0, 1, "", "AutoScale"], [1, 0, 1, "", "AwsAttributes"], [1, 0, 1, "", "AwsAvailability"], [1, 0, 1, "", "AzureAttributes"], [1, 0, 1, "", "AzureAvailability"], [1, 0, 1, "", "ClientsTypes"], [1, 0, 1, "", "ClusterLogConf"], [1, 0, 1, "", "ClusterSpec"], [1, 0, 1, "", "Condition"], [1, 0, 1, "", "ConditionTask"], [1, 0, 1, "", "ConditionTaskOp"], [1, 0, 1, "", "Continuous"], [1, 0, 1, "", "CronSchedule"], [1, 0, 1, "", "DataSecurityMode"], [1, 0, 1, "", "DbfsStorageInfo"], [1, 0, 1, "", "DbtTask"], [1, 0, 1, "", "DockerBasicAuth"], [1, 0, 1, "", "DockerImage"], [1, 0, 1, "", "EbsVolumeType"], [1, 0, 1, "", "Environment"], [1, 0, 1, "", "FileArrivalTriggerConfiguration"], [1, 0, 1, "", "ForEachTask"], [1, 0, 1, "", "GcpAttributes"], [1, 0, 1, "", "GcpAvailability"], [1, 0, 1, "", "GcsStorageInfo"], [1, 0, 1, "", "GitProvider"], [1, 0, 1, "", "GitSnapshot"], [1, 0, 1, "", "GitSource"], [1, 0, 1, "", 
"InitScriptInfo"], [1, 0, 1, "", "Job"], [1, 0, 1, "", "JobCluster"], [1, 0, 1, "", "JobEmailNotifications"], [1, 0, 1, "", "JobEnvironment"], [1, 0, 1, "", "JobNotificationSettings"], [1, 0, 1, "", "JobParameterDefinition"], [1, 0, 1, "", "JobRunAs"], [1, 0, 1, "", "JobsHealthMetric"], [1, 0, 1, "", "JobsHealthOperator"], [1, 0, 1, "", "JobsHealthRule"], [1, 0, 1, "", "JobsHealthRules"], [1, 0, 1, "", "Library"], [1, 0, 1, "", "LocalFileInfo"], [1, 0, 1, "", "LogAnalyticsInfo"], [1, 0, 1, "", "MavenLibrary"], [1, 0, 1, "", "NotebookTask"], [1, 0, 1, "", "PauseStatus"], [1, 0, 1, "", "PeriodicTriggerConfiguration"], [1, 0, 1, "", "PeriodicTriggerConfigurationTimeUnit"], [1, 0, 1, "", "Permission"], [1, 0, 1, "", "PipelineTask"], [1, 0, 1, "", "PythonPyPiLibrary"], [1, 0, 1, "", "PythonWheelTask"], [1, 0, 1, "", "QueueSettings"], [1, 0, 1, "", "RCranLibrary"], [1, 0, 1, "", "RunIf"], [1, 0, 1, "", "RunJobTask"], [1, 0, 1, "", "RuntimeEngine"], [1, 0, 1, "", "S3StorageInfo"], [1, 0, 1, "", "Source"], [1, 0, 1, "", "SparkJarTask"], [1, 0, 1, "", "SparkPythonTask"], [1, 0, 1, "", "SparkSubmitTask"], [1, 0, 1, "", "SqlTask"], [1, 0, 1, "", "SqlTaskAlert"], [1, 0, 1, "", "SqlTaskDashboard"], [1, 0, 1, "", "SqlTaskFile"], [1, 0, 1, "", "SqlTaskQuery"], [1, 0, 1, "", "SqlTaskSubscription"], [1, 0, 1, "", "TableUpdateTriggerConfiguration"], [1, 0, 1, "", "Task"], [1, 0, 1, "", "TaskDependency"], [1, 0, 1, "", "TaskEmailNotifications"], [1, 0, 1, "", "TaskNotificationSettings"], [1, 0, 1, "", "TriggerSettings"], [1, 0, 1, "", "VolumesStorageInfo"], [1, 0, 1, "", "Webhook"], [1, 0, 1, "", "WebhookNotifications"], [1, 0, 1, "", "WorkloadType"], [1, 0, 1, "", "WorkspaceStorageInfo"]], "databricks.bundles.jobs.Adlsgen2Info": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.AutoScale": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "max_workers"], [1, 2, 1, "", "min_workers"]], "databricks.bundles.jobs.AwsAttributes": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "availability"], [1, 2, 1, "", "ebs_volume_count"], [1, 2, 1, "", "ebs_volume_iops"], [1, 2, 1, "", "ebs_volume_size"], [1, 2, 1, "", "ebs_volume_throughput"], [1, 2, 1, "", "ebs_volume_type"], [1, 2, 1, "", "first_on_demand"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "instance_profile_arn"], [1, 2, 1, "", "spot_bid_price_percent"], [1, 2, 1, "", "zone_id"]], "databricks.bundles.jobs.AwsAvailability": [[1, 2, 1, "", "ON_DEMAND"], [1, 2, 1, "", "SPOT"], [1, 2, 1, "", "SPOT_WITH_FALLBACK"]], "databricks.bundles.jobs.AzureAttributes": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "availability"], [1, 2, 1, "", "first_on_demand"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "log_analytics_info"], [1, 2, 1, "", "spot_bid_max_price"]], "databricks.bundles.jobs.AzureAvailability": [[1, 2, 1, "", "ON_DEMAND_AZURE"], [1, 2, 1, "", "SPOT_AZURE"], [1, 2, 1, "", "SPOT_WITH_FALLBACK_AZURE"]], "databricks.bundles.jobs.ClientsTypes": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "jobs"], [1, 2, 1, "", "notebooks"]], "databricks.bundles.jobs.ClusterLogConf": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "dbfs"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "s3"]], "databricks.bundles.jobs.ClusterSpec": [[1, 2, 1, "", "apply_policy_default_values"], [1, 1, 1, "", "as_dict"], [1, 2, 1, "", "autoscale"], [1, 2, 1, "", "autotermination_minutes"], [1, 2, 1, "", "aws_attributes"], [1, 2, 1, "", "azure_attributes"], [1, 2, 1, "", "cluster_log_conf"], [1, 2, 1, "", "cluster_name"], 
[1, 2, 1, "", "custom_tags"], [1, 2, 1, "", "data_security_mode"], [1, 2, 1, "", "docker_image"], [1, 2, 1, "", "driver_instance_pool_id"], [1, 2, 1, "", "driver_node_type_id"], [1, 2, 1, "", "enable_elastic_disk"], [1, 2, 1, "", "enable_local_disk_encryption"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "gcp_attributes"], [1, 2, 1, "", "init_scripts"], [1, 2, 1, "", "instance_pool_id"], [1, 2, 1, "", "is_single_node"], [1, 2, 1, "", "node_type_id"], [1, 2, 1, "", "num_workers"], [1, 2, 1, "", "policy_id"], [1, 2, 1, "", "runtime_engine"], [1, 2, 1, "", "single_user_name"], [1, 2, 1, "", "spark_conf"], [1, 2, 1, "", "spark_env_vars"], [1, 2, 1, "", "spark_version"], [1, 2, 1, "", "ssh_public_keys"], [1, 2, 1, "", "use_ml_runtime"], [1, 2, 1, "", "workload_type"]], "databricks.bundles.jobs.Condition": [[1, 2, 1, "", "ALL_UPDATED"], [1, 2, 1, "", "ANY_UPDATED"]], "databricks.bundles.jobs.ConditionTask": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "left"], [1, 2, 1, "", "op"], [1, 2, 1, "", "right"]], "databricks.bundles.jobs.ConditionTaskOp": [[1, 2, 1, "", "EQUAL_TO"], [1, 2, 1, "", "GREATER_THAN"], [1, 2, 1, "", "GREATER_THAN_OR_EQUAL"], [1, 2, 1, "", "LESS_THAN"], [1, 2, 1, "", "LESS_THAN_OR_EQUAL"], [1, 2, 1, "", "NOT_EQUAL"]], "databricks.bundles.jobs.Continuous": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "pause_status"]], "databricks.bundles.jobs.CronSchedule": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "pause_status"], [1, 2, 1, "", "quartz_cron_expression"], [1, 2, 1, "", "timezone_id"]], "databricks.bundles.jobs.DataSecurityMode": [[1, 2, 1, "", "DATA_SECURITY_MODE_AUTO"], [1, 2, 1, "", "DATA_SECURITY_MODE_DEDICATED"], [1, 2, 1, "", "DATA_SECURITY_MODE_STANDARD"], [1, 2, 1, "", "LEGACY_PASSTHROUGH"], [1, 2, 1, "", "LEGACY_SINGLE_USER"], [1, 2, 1, "", "LEGACY_SINGLE_USER_STANDARD"], [1, 2, 1, "", "LEGACY_TABLE_ACL"], [1, 2, 1, "", "NONE"], [1, 2, 1, "", "SINGLE_USER"], [1, 2, 1, "", "USER_ISOLATION"]], "databricks.bundles.jobs.DbfsStorageInfo": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.DbtTask": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "catalog"], [1, 2, 1, "", "commands"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "profiles_directory"], [1, 2, 1, "", "project_directory"], [1, 2, 1, "", "schema"], [1, 2, 1, "", "source"], [1, 2, 1, "", "warehouse_id"]], "databricks.bundles.jobs.DockerBasicAuth": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "password"], [1, 2, 1, "", "username"]], "databricks.bundles.jobs.DockerImage": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "basic_auth"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "url"]], "databricks.bundles.jobs.EbsVolumeType": [[1, 2, 1, "", "GENERAL_PURPOSE_SSD"], [1, 2, 1, "", "THROUGHPUT_OPTIMIZED_HDD"]], "databricks.bundles.jobs.Environment": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "client"], [1, 2, 1, "", "dependencies"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.FileArrivalTriggerConfiguration": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "min_time_between_triggers_seconds"], [1, 2, 1, "", "url"], [1, 2, 1, "", "wait_after_last_change_seconds"]], "databricks.bundles.jobs.ForEachTask": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "concurrency"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "inputs"], [1, 2, 1, "", "task"]], "databricks.bundles.jobs.GcpAttributes": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "availability"], [1, 2, 1, "", 
"boot_disk_size"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "google_service_account"], [1, 2, 1, "", "local_ssd_count"], [1, 2, 1, "", "use_preemptible_executors"], [1, 2, 1, "", "zone_id"]], "databricks.bundles.jobs.GcpAvailability": [[1, 2, 1, "", "ON_DEMAND_GCP"], [1, 2, 1, "", "PREEMPTIBLE_GCP"], [1, 2, 1, "", "PREEMPTIBLE_WITH_FALLBACK_GCP"]], "databricks.bundles.jobs.GcsStorageInfo": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.GitProvider": [[1, 2, 1, "", "AWS_CODE_COMMIT"], [1, 2, 1, "", "AZURE_DEV_OPS_SERVICES"], [1, 2, 1, "", "BITBUCKET_CLOUD"], [1, 2, 1, "", "BITBUCKET_SERVER"], [1, 2, 1, "", "GIT_HUB"], [1, 2, 1, "", "GIT_HUB_ENTERPRISE"], [1, 2, 1, "", "GIT_LAB"], [1, 2, 1, "", "GIT_LAB_ENTERPRISE_EDITION"]], "databricks.bundles.jobs.GitSnapshot": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "used_commit"]], "databricks.bundles.jobs.GitSource": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "git_branch"], [1, 2, 1, "", "git_commit"], [1, 2, 1, "", "git_provider"], [1, 2, 1, "", "git_tag"], [1, 2, 1, "", "git_url"]], "databricks.bundles.jobs.InitScriptInfo": [[1, 2, 1, "", "abfss"], [1, 1, 1, "", "as_dict"], [1, 2, 1, "", "dbfs"], [1, 2, 1, "", "file"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "gcs"], [1, 2, 1, "", "s3"], [1, 2, 1, "", "volumes"], [1, 2, 1, "", "workspace"]], "databricks.bundles.jobs.Job": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "budget_policy_id"], [1, 2, 1, "", "continuous"], [1, 2, 1, "", "description"], [1, 2, 1, "", "email_notifications"], [1, 2, 1, "", "environments"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "git_source"], [1, 2, 1, "", "health"], [1, 2, 1, "", "job_clusters"], [1, 2, 1, "", "max_concurrent_runs"], [1, 2, 1, "", "name"], [1, 2, 1, "", "notification_settings"], [1, 2, 1, "", "parameters"], [1, 2, 1, "", "permissions"], [1, 2, 1, "", "queue"], [1, 2, 1, "", "run_as"], [1, 2, 1, "", "schedule"], [1, 2, 1, "", "tags"], [1, 2, 1, "", "tasks"], [1, 2, 1, "", "timeout_seconds"], [1, 2, 1, "", "trigger"], [1, 2, 1, "", "webhook_notifications"]], "databricks.bundles.jobs.JobCluster": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "job_cluster_key"], [1, 2, 1, "", "new_cluster"]], "databricks.bundles.jobs.JobEmailNotifications": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "no_alert_for_skipped_runs"], [1, 2, 1, "", "on_duration_warning_threshold_exceeded"], [1, 2, 1, "", "on_failure"], [1, 2, 1, "", "on_start"], [1, 2, 1, "", "on_streaming_backlog_exceeded"], [1, 2, 1, "", "on_success"]], "databricks.bundles.jobs.JobEnvironment": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "environment_key"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "spec"]], "databricks.bundles.jobs.JobNotificationSettings": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "no_alert_for_canceled_runs"], [1, 2, 1, "", "no_alert_for_skipped_runs"]], "databricks.bundles.jobs.JobParameterDefinition": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "default"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "name"]], "databricks.bundles.jobs.JobRunAs": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "service_principal_name"], [1, 2, 1, "", "user_name"]], "databricks.bundles.jobs.JobsHealthMetric": [[1, 2, 1, "", "RUN_DURATION_SECONDS"], [1, 2, 1, "", "STREAMING_BACKLOG_BYTES"], [1, 2, 1, "", "STREAMING_BACKLOG_FILES"], [1, 2, 1, "", "STREAMING_BACKLOG_RECORDS"], [1, 2, 1, "", 
"STREAMING_BACKLOG_SECONDS"]], "databricks.bundles.jobs.JobsHealthOperator": [[1, 2, 1, "", "GREATER_THAN"]], "databricks.bundles.jobs.JobsHealthRule": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "metric"], [1, 2, 1, "", "op"], [1, 2, 1, "", "value"]], "databricks.bundles.jobs.JobsHealthRules": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "rules"]], "databricks.bundles.jobs.Library": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "cran"], [1, 2, 1, "", "egg"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "jar"], [1, 2, 1, "", "maven"], [1, 2, 1, "", "pypi"], [1, 2, 1, "", "requirements"], [1, 2, 1, "", "whl"]], "databricks.bundles.jobs.LocalFileInfo": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.LogAnalyticsInfo": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "log_analytics_primary_key"], [1, 2, 1, "", "log_analytics_workspace_id"]], "databricks.bundles.jobs.MavenLibrary": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "coordinates"], [1, 2, 1, "", "exclusions"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "repo"]], "databricks.bundles.jobs.NotebookTask": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "base_parameters"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "notebook_path"], [1, 2, 1, "", "source"], [1, 2, 1, "", "warehouse_id"]], "databricks.bundles.jobs.PauseStatus": [[1, 2, 1, "", "PAUSED"], [1, 2, 1, "", "UNPAUSED"]], "databricks.bundles.jobs.PeriodicTriggerConfiguration": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "interval"], [1, 2, 1, "", "unit"]], "databricks.bundles.jobs.PeriodicTriggerConfigurationTimeUnit": [[1, 2, 1, "", "DAYS"], [1, 2, 1, "", "HOURS"], [1, 2, 1, "", "WEEKS"]], "databricks.bundles.jobs.Permission": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "group_name"], [1, 2, 1, "", "level"], [1, 2, 1, "", "service_principal_name"], [1, 2, 1, "", "user_name"]], "databricks.bundles.jobs.PipelineTask": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "full_refresh"], [1, 2, 1, "", "pipeline_id"]], "databricks.bundles.jobs.PythonPyPiLibrary": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "package"], [1, 2, 1, "", "repo"]], "databricks.bundles.jobs.PythonWheelTask": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "entry_point"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "named_parameters"], [1, 2, 1, "", "package_name"], [1, 2, 1, "", "parameters"]], "databricks.bundles.jobs.QueueSettings": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "enabled"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.RCranLibrary": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "package"], [1, 2, 1, "", "repo"]], "databricks.bundles.jobs.RunIf": [[1, 2, 1, "", "ALL_DONE"], [1, 2, 1, "", "ALL_FAILED"], [1, 2, 1, "", "ALL_SUCCESS"], [1, 2, 1, "", "AT_LEAST_ONE_FAILED"], [1, 2, 1, "", "AT_LEAST_ONE_SUCCESS"], [1, 2, 1, "", "NONE_FAILED"]], "databricks.bundles.jobs.RunJobTask": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "job_id"], [1, 2, 1, "", "job_parameters"]], "databricks.bundles.jobs.RuntimeEngine": [[1, 2, 1, "", "NULL"], [1, 2, 1, "", "PHOTON"], [1, 2, 1, "", "STANDARD"]], "databricks.bundles.jobs.S3StorageInfo": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "canned_acl"], [1, 2, 1, "", "destination"], [1, 2, 1, "", "enable_encryption"], [1, 2, 1, "", "encryption_type"], [1, 2, 1, "", "endpoint"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "kms_key"], 
[1, 2, 1, "", "region"]], "databricks.bundles.jobs.Source": [[1, 2, 1, "", "GIT"], [1, 2, 1, "", "WORKSPACE"]], "databricks.bundles.jobs.SparkJarTask": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "jar_uri"], [1, 2, 1, "", "main_class_name"], [1, 2, 1, "", "parameters"]], "databricks.bundles.jobs.SparkPythonTask": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "parameters"], [1, 2, 1, "", "python_file"], [1, 2, 1, "", "source"]], "databricks.bundles.jobs.SparkSubmitTask": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "parameters"]], "databricks.bundles.jobs.SqlTask": [[1, 2, 1, "", "alert"], [1, 1, 1, "", "as_dict"], [1, 2, 1, "", "dashboard"], [1, 2, 1, "", "file"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "parameters"], [1, 2, 1, "", "query"], [1, 2, 1, "", "warehouse_id"]], "databricks.bundles.jobs.SqlTaskAlert": [[1, 2, 1, "", "alert_id"], [1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "pause_subscriptions"], [1, 2, 1, "", "subscriptions"]], "databricks.bundles.jobs.SqlTaskDashboard": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "custom_subject"], [1, 2, 1, "", "dashboard_id"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "pause_subscriptions"], [1, 2, 1, "", "subscriptions"]], "databricks.bundles.jobs.SqlTaskFile": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "path"], [1, 2, 1, "", "source"]], "databricks.bundles.jobs.SqlTaskQuery": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "query_id"]], "databricks.bundles.jobs.SqlTaskSubscription": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination_id"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "user_name"]], "databricks.bundles.jobs.TableUpdateTriggerConfiguration": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "condition"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "min_time_between_triggers_seconds"], [1, 2, 1, "", "table_names"], [1, 2, 1, "", "wait_after_last_change_seconds"]], "databricks.bundles.jobs.Task": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "condition_task"], [1, 2, 1, "", "dbt_task"], [1, 2, 1, "", "depends_on"], [1, 2, 1, "", "description"], [1, 2, 1, "", "disable_auto_optimization"], [1, 2, 1, "", "email_notifications"], [1, 2, 1, "", "environment_key"], [1, 2, 1, "", "existing_cluster_id"], [1, 2, 1, "", "for_each_task"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "health"], [1, 2, 1, "", "job_cluster_key"], [1, 2, 1, "", "libraries"], [1, 2, 1, "", "max_retries"], [1, 2, 1, "", "min_retry_interval_millis"], [1, 2, 1, "", "new_cluster"], [1, 2, 1, "", "notebook_task"], [1, 2, 1, "", "notification_settings"], [1, 2, 1, "", "pipeline_task"], [1, 2, 1, "", "python_wheel_task"], [1, 2, 1, "", "retry_on_timeout"], [1, 2, 1, "", "run_if"], [1, 2, 1, "", "run_job_task"], [1, 2, 1, "", "spark_jar_task"], [1, 2, 1, "", "spark_python_task"], [1, 2, 1, "", "spark_submit_task"], [1, 2, 1, "", "sql_task"], [1, 2, 1, "", "task_key"], [1, 2, 1, "", "timeout_seconds"], [1, 2, 1, "", "webhook_notifications"]], "databricks.bundles.jobs.TaskDependency": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "outcome"], [1, 2, 1, "", "task_key"]], "databricks.bundles.jobs.TaskEmailNotifications": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "no_alert_for_skipped_runs"], [1, 2, 1, "", "on_duration_warning_threshold_exceeded"], [1, 2, 1, "", "on_failure"], [1, 2, 1, "", "on_start"], [1, 2, 1, "", "on_streaming_backlog_exceeded"], [1, 2, 1, "", "on_success"]], 
"databricks.bundles.jobs.TaskNotificationSettings": [[1, 2, 1, "", "alert_on_last_attempt"], [1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "no_alert_for_canceled_runs"], [1, 2, 1, "", "no_alert_for_skipped_runs"]], "databricks.bundles.jobs.TriggerSettings": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "file_arrival"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "pause_status"], [1, 2, 1, "", "periodic"], [1, 2, 1, "", "table_update"]], "databricks.bundles.jobs.VolumesStorageInfo": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.Webhook": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "id"]], "databricks.bundles.jobs.WebhookNotifications": [[1, 1, 1, "", "as_dict"], [1, 1, 1, "", "from_dict"], [1, 2, 1, "", "on_duration_warning_threshold_exceeded"], [1, 2, 1, "", "on_failure"], [1, 2, 1, "", "on_start"], [1, 2, 1, "", "on_streaming_backlog_exceeded"], [1, 2, 1, "", "on_success"]], "databricks.bundles.jobs.WorkloadType": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "clients"], [1, 1, 1, "", "from_dict"]], "databricks.bundles.jobs.WorkspaceStorageInfo": [[1, 1, 1, "", "as_dict"], [1, 2, 1, "", "destination"], [1, 1, 1, "", "from_dict"]]}, "objnames": {"0": ["py", "class", "Python class"], "1": ["py", "method", "Python method"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "property", "Python property"], "4": ["py", "function", "Python function"]}, "objtypes": {"0": "py:class", "1": "py:method", "2": "py:attribute", "3": "py:property", "4": "py:function"}, "terms": {"": 1, "0": [0, 1], "04": 1, "1": 1, "10": 1, "100": 1, "1000": 1, "10000": 1, "11": 1, "12": [0, 1], "14": [0, 1], "15": 1, "1mb": 1, "2": 1, "200": 1, "2016": 1, "2200": 1, "25": 1, "27700": 1, "28000m": 1, "2a": 1, "3": [0, 1], "30": 1, "375gb": 1, "4": 1, "4096": 1, "45": 1, "5": 1, "50": 1, "500": 1, "60": 1, "7": 1, "8": 1, "A": [0, 1], "And": 0, "At": 1, "But": 1, "By": 1, "For": [0, 1], "If": [0, 1], "In": 1, "It": 1, "NOT": 1, "No": 1, "On": 1, "One": 1, "The": [0, 1], "Then": 1, "These": 1, "With": 1, "_": 1, "_cannot_": 1, "_t": 0, "abfss": 1, "abil": 1, "abl": 1, "about": [0, 1], "abov": 1, "absolut": 1, "access": [0, 1], "account": 1, "accumul": 0, "acl": 1, "acquir": 1, "across": [0, 1], "activ": 1, "actual": 1, "ad": 1, "add": [0, 1], "add_diagnost": 0, "add_diagnostic_error": 0, "add_diagnostic_warn": 0, "add_job": 0, "add_loc": 0, "add_resourc": 0, "addit": 1, "address": 1, "adl": 1, "adlsgen2info": 1, "admin": 1, "administr": 1, "affect": 1, "after": 1, "agent": 1, "alert": 1, "alert_id": 1, "alert_on_last_attempt": 1, "alia": 1, "all": [0, 1], "all_don": 1, "all_fail": 1, "all_success": 1, "all_upd": 1, "allow": [0, 1], "alphanumer": 1, "also": 1, "alwai": 1, "amazon": 1, "amazonaw": 1, "amazons3": 1, "amount": 1, "an": [0, 1], "analyt": 1, "ani": [0, 1], "annot": 0, "anoth": [0, 1], "any_upd": 1, "apach": 1, "api": 1, "append": 1, "appli": [0, 1], "applic": 1, "apply_policy_default_valu": 1, "appropri": 1, "ar": [0, 1], "archiv": 1, "argument": 1, "arrai": 1, "arriv": 1, "as_dict": [0, 1], "asset": [0, 2], "assign": 1, "associ": [0, 1], "at_least_one_fail": 1, "at_least_one_success": 1, "attach": 1, "attribut": [0, 1], "auto": 1, "automat": 1, "autosc": 1, "autoscal": 1, "autotermination_minut": 1, "avail": 1, "averag": 1, "aw": 1, "aws_attribut": 1, "aws_code_commit": 1, "awsattribut": 1, "awsavail": 1, "awscodecommit": 1, "az": 1, "azur": 1, "azure_attribut": 1, "azure_dev_ops_servic": 1, 
"azureattribut": 1, "azureavail": 1, "azuredevopsservic": 1, "backlog": 1, "bar": 0, "base": [0, 1], "base_paramet": 1, "basi": 1, "basic_auth": 1, "batch": 1, "becaus": 1, "been": 1, "befor": 1, "begin": 1, "behavior": 1, "being": [0, 1], "belong": 1, "below": 1, "best": 1, "between": 1, "bid": 1, "bitbucket_cloud": 1, "bitbucket_serv": 1, "bitbucketcloud": 1, "bitbucketserv": 1, "bool": [0, 1], "boolean": 1, "boot": 1, "boot_disk_s": 1, "both": 1, "branch": 1, "bucket": 1, "budget": 1, "budget_policy_id": 1, "built": 0, "bundl": [0, 1], "byte": 1, "call": 1, "callabl": 0, "caller": 0, "can": [0, 1], "cancel": 1, "canned_acl": 1, "canned_c": 1, "cannot": 1, "canon": 1, "capac": 1, "captur": 0, "case": 1, "catalog": 1, "catalog_nam": 1, "caus": [0, 1], "cell": 1, "central": 1, "certain": 1, "chang": [0, 1], "charact": 1, "check": 1, "choos": 1, "cl": 0, "class": 2, "classic": 1, "classmethod": [0, 1], "click": 1, "client": 1, "clientstyp": 1, "clone": 1, "cloud": 1, "cluster": 1, "cluster_log": 1, "cluster_log_bucket": 1, "cluster_log_conf": 1, "cluster_nam": 1, "clusterid": 1, "clusterlogconf": 1, "clusterspec": 1, "code": [0, 1], "collect": [0, 1], "column": 0, "com": 1, "combin": 0, "come": 1, "command": 1, "commit": 1, "commonconf": 1, "compar": 1, "comparison": 1, "compat": 1, "complet": 1, "complex": 0, "comput": 1, "concurr": 1, "condit": 1, "condition_task": 1, "conditiontask": 1, "conditiontaskop": 1, "conf": 1, "config": 0, "configur": [0, 1], "conjunct": 1, "connect": 1, "consecut": 1, "consid": 1, "consist": 1, "construct": 0, "consum": 1, "contain": [0, 1], "content": 1, "context": 1, "continu": 1, "control": 1, "coordin": 1, "core": [1, 2], "correctli": 1, "correspond": [0, 1], "could": 1, "cran": 1, "creat": [0, 1], "create_error": 0, "create_job": 0, "create_warn": 0, "creation": 1, "credenti": 1, "cron": 1, "cronschedul": 1, "cross": 1, "current": [0, 1], "custom": 1, "custom_subject": 1, "custom_tag": 1, "dai": 1, "dashboard": 1, "dashboard_id": 1, "data": [0, 1], "data_security_mod": 1, "data_security_mode_auto": 1, "data_security_mode_ded": 1, "data_security_mode_standard": 1, "databrick": [0, 1], "datacent": 1, "datasecuritymod": 1, "db": 1, "dbf": 1, "dbfsstorageinfo": 1, "dbr": 1, "dbt": 1, "dbt_task": 1, "dbttask": 1, "dbutil": 1, "decid": 1, "declar": 1, "decor": 2, "def": 0, "default": [0, 1], "default_dbr_vers": 0, "default_tag": 1, "defaultspotbidpriceperc": 1, "defin": [0, 1], "definit": 1, "delai": 1, "delet": 1, "deliv": 1, "delta": 1, "demand": 1, "depend": 1, "depends_on": 1, "deploi": 1, "deploy": 1, "deprec": 1, "depth": 0, "describ": 1, "descript": [0, 1], "desir": [0, 1], "destin": 1, "destination_id": 1, "detail": [0, 1], "determin": 1, "dev": 1, "develop": 0, "devic": 1, "df": 1, "diagnost": 0, "dict": [0, 1], "dictionari": 0, "differ": [0, 1], "dir": 1, "directli": 1, "directori": 1, "disabl": 1, "disable_auto_optim": 1, "disk": 1, "do": [0, 1], "doc": 1, "docker": 1, "docker_imag": 1, "dockerbasicauth": 1, "dockerimag": 1, "document": 1, "doe": 1, "doesn": 1, "down": 1, "driver": 1, "driver_instance_pool_id": 1, "driver_node_type_id": 1, "dspark": 1, "duplic": 0, "durat": 1, "dynam": 1, "e": [0, 1], "each": [0, 1], "east": 1, "eb": 1, "ebs0": 1, "ebs1": 1, "ebs_volume_count": 1, "ebs_volume_iop": 1, "ebs_volume_s": 1, "ebs_volume_throughput": 1, "ebs_volume_typ": 1, "ebsvolumetyp": 1, "effective_budget_policy_id": 1, "effective_spark_vers": 1, "egg": 1, "either": 1, "element": [0, 1], "els": 0, "email": 1, "email_notif": 1, "empti": 1, "en": 1, 
"enabl": 1, "enable_elastic_disk": 1, "enable_encrypt": 1, "enable_local_disk_encrypt": 1, "encod": 1, "encrypt": 1, "encryption_typ": 1, "end": 1, "endpoint": 1, "enforc": 1, "engin": 1, "enough": 1, "ensur": 1, "entir": 1, "entiti": 1, "entri": 1, "entry_point": 1, "entrypoint": 1, "environ": 1, "environment": 1, "environment_kei": 1, "equal": 1, "equal_to": 1, "error": [0, 1], "estim": 1, "etc": 1, "evalu": 1, "everi": 1, "evict": 1, "exact": 1, "exampl": [0, 1], "exc": 0, "exce": 1, "exceed": 1, "except": 0, "exclud": 1, "exclus": 1, "execut": 1, "executor": 1, "exist": 1, "existing_cluster_id": 1, "expect": 1, "experiment": 0, "explan": 0, "explicitli": 1, "export": 1, "express": 1, "extend": 0, "extend_tupl": 0, "extern": 1, "extra": 1, "extrajavaopt": 1, "face": 1, "fail": [0, 1], "fallback": 1, "fals": 1, "featur": 1, "fewer": 1, "field": 1, "file": [0, 1], "file_arriv": 1, "filearrivaltriggerconfigur": 1, "filesystem": 1, "fire": 1, "first": 1, "first_on_demand": 1, "fix": 1, "flag": 1, "float": 1, "fn": 0, "folder": 1, "follow": 1, "foo": [0, 1], "for_each_task": 1, "foreachtask": 1, "form": 1, "format": [0, 1], "former": 1, "forward": 1, "found": 1, "frequent": 1, "from": [0, 1], "from_cal": 0, "from_dict": 1, "from_except": 0, "from_stack_fram": 0, "full": 1, "full_refresh": 1, "fulli": [0, 1], "function": [0, 1], "further": 1, "futur": 1, "g": [0, 1], "gb": 1, "gc": 1, "gcloud": 1, "gcp": 1, "gcp_attribut": 1, "gcpattribut": 1, "gcpavail": 1, "gcsstorageinfo": 1, "gener": 1, "general_purpose_ssd": 1, "get": 1, "get_config": 0, "getorcr": 1, "gib": 1, "git": 1, "git_branch": 1, "git_commit": 1, "git_hub": 1, "git_hub_enterpris": 1, "git_lab": 1, "git_lab_enterprise_edit": 1, "git_provid": 1, "git_sourc": 1, "git_tag": 1, "git_url": 1, "github": 1, "githubenterpris": 1, "gitlab": 1, "gitlabenterpriseedit": 1, "gitprovid": 1, "gitsnapshot": 1, "gitsourc": 1, "given": [0, 1], "googl": 1, "google_service_account": 1, "govern": 1, "gp2": 1, "gp3": 1, "gpu": 1, "gradl": 1, "gradual": 1, "graph": 1, "greater": 1, "greater_than": 1, "greater_than_or_equ": 1, "group": 1, "group_nam": 1, "guarante": 0, "guid": 1, "ha": 1, "hadoop": 1, "half": 1, "happen": 1, "has_error": 0, "have": [0, 1], "hdd": 1, "head": 1, "health": 1, "heap": 1, "here": 1, "heterogen": 1, "high": 1, "higher": 1, "home": 1, "host": 1, "hour": 1, "howev": 1, "html": 1, "http": 1, "i": [0, 1], "iam": 1, "id": [0, 1], "identifi": [0, 1], "imag": 1, "immedi": 1, "imperson": 1, "implement": [0, 1, 2], "inact": 1, "includ": 1, "incom": 1, "increas": 1, "indefinit": 1, "index": 1, "indic": [0, 1], "ineffici": 1, "inform": [0, 1], "init": 1, "init_script": 1, "initi": [0, 1], "initscriptinfo": 1, "input": [0, 1], "insensit": 1, "instal": 1, "instanc": [0, 1], "instance_pool_id": 1, "instance_profile_arn": 1, "instead": [0, 1], "int": [0, 1], "intens": 1, "internal_error": 1, "interv": 1, "introduct": 1, "io": 1, "iop": 1, "is_single_nod": 1, "isol": 1, "issu": 1, "item": 0, "iter": [0, 1], "its": [0, 1], "jar": 1, "jar_uri": 1, "java": 1, "job": [0, 2], "job_clust": 1, "job_cluster_kei": 1, "job_id": 1, "job_mut": 0, "job_paramet": 1, "jobclust": 1, "jobemailnotif": 1, "jobenviron": 1, "jobnotificationset": 1, "jobparam": 0, "jobparameterdefinit": 1, "jobruna": 1, "jobshealthmetr": 1, "jobshealthoper": 1, "jobshealthrul": 1, "jobtaskset": 1, "json": 1, "jsoup": 1, "jvm": 1, "kei": 1, "kept": 1, "kill": 1, "kind": 1, "km": 1, "kms_kei": 1, "lag": 1, "languag": 1, "last": 1, "later": [0, 1], "latest": 1, "launch": 1, "lead": 
1, "least": [0, 1], "leav": 1, "left": 1, "legaci": 1, "legacy_passthrough": 1, "legacy_single_us": 1, "legacy_single_user_standard": 1, "legacy_table_acl": 1, "length": 1, "less": 1, "less_than": 1, "less_than_or_equ": 1, "level": 1, "librari": 1, "life_cycle_st": 1, "lifetim": 1, "like": [0, 1], "limit": 1, "line": [0, 1], "list": [0, 1], "listnodetyp": 1, "live": 1, "load": [0, 1], "load_resourc": 0, "load_resources_from_current_package_modul": 0, "load_resources_from_modul": 0, "load_resources_from_package_modul": 0, "local": 1, "local_disk0": 1, "local_disk1": 1, "local_ssd_count": 1, "localfileinfo": 1, "locat": [0, 1], "log": 1, "log_analytics_info": 1, "log_analytics_primary_kei": 1, "log_analytics_workspace_id": 1, "loganalyticsinfo": 1, "login": 1, "long": 1, "low": 1, "luk": 1, "machin": 1, "mai": 1, "main": 1, "main_class_nam": 1, "major": 1, "make": 1, "manag": 1, "manual": 1, "map": 1, "master": 1, "match": 1, "maven": 1, "mavenlibrari": 1, "max": 1, "max_concurrent_run": 1, "max_retri": 1, "max_work": 1, "maximum": 1, "maxspotbidpriceperc": 1, "mean": 1, "memori": 1, "merg": 1, "met": 1, "metadata": 1, "method": [1, 2], "metric": 1, "microsoft": 1, "might": 1, "migrat": 1, "millisecond": 1, "min": 1, "min_retry_interval_milli": 1, "min_time_between_triggers_second": 1, "min_work": 1, "minim": 1, "minimum": 1, "minut": 1, "mode": 1, "model": 1, "modifi": 1, "modul": 0, "moduletyp": 0, "monitor": 1, "more": 1, "most": 1, "mount": 1, "msg": 0, "multipl": [0, 1], "must": [0, 1], "mutat": [0, 1], "mutual": 1, "my": 1, "my_job": 0, "my_job_mut": 0, "my_vari": 0, "myvari": 0, "name": [0, 1], "named_paramet": 1, "namespac": 1, "necessari": 1, "need": 1, "nest": 1, "net": 1, "never": 1, "new": [0, 1], "new_clust": 1, "no_alert_for_canceled_run": 1, "no_alert_for_skipped_run": 1, "node": 1, "node_type_id": 1, "non": 1, "none": [0, 1], "none_fail": 1, "nor": 1, "not_equ": 1, "note": 1, "notebook": 1, "notebook_path": 1, "notebook_task": 1, "notebooktask": 1, "notif": 1, "notifi": 1, "notification_set": 1, "now": 1, "null": 1, "num_work": 1, "number": 1, "numer": 1, "obei": 1, "object": [0, 1], "obtain": 1, "occur": [0, 1], "off": 1, "offset": 1, "omit": 1, "ommit": 1, "on_demand": 1, "on_demand_azur": 1, "on_demand_gcp": 1, "on_duration_warning_threshold_exceed": 1, "on_failur": 1, "on_start": 1, "on_streaming_backlog_exceed": 1, "on_success": 1, "onc": 1, "one": [0, 1], "ones": 1, "onli": 1, "op": 1, "oper": 1, "operand": 1, "optim": 1, "option": [0, 1], "order": [0, 1], "org": 1, "other": [0, 1], "otherwis": [0, 1], "out": 1, "outcom": 1, "output": 0, "outstand": 1, "over": 1, "overlap": 1, "overload": 1, "overrid": 1, "overridden": 1, "overview": 1, "own": 1, "owner": 1, "packag": [0, 1, 2], "package_modul": 0, "package_nam": 1, "packagenam": 1, "pair": [0, 1], "panel": 1, "param": 0, "paramet": [0, 1], "parent": 1, "part": 1, "particular": 1, "pass": 1, "passthrough": 1, "password": 1, "past": 1, "path": [0, 1], "pattern": 0, "paus": 1, "pause_statu": 1, "pause_subscript": 1, "pausestatu": 1, "per": 1, "percentag": 1, "perform": 1, "period": 1, "periodictriggerconfigur": 1, "periodictriggerconfigurationtimeunit": 1, "permiss": 1, "persist": 1, "photon": 1, "pick": 1, "pip": 1, "pipelin": 1, "pipeline_id": 1, "pipeline_task": 1, "pipelinetask": 1, "place": 1, "placement": 1, "plan": 1, "platform": 1, "pleas": 1, "point": 1, "polici": 1, "policy_id": 1, "pool": 1, "port": 1, "possibl": 1, "pre": 1, "preemptibl": 1, "preemptible_gcp": 1, "preemptible_with_fallback_gcp": 1, "prefix": 
1, "present": 1, "preserv": 1, "preview": 1, "previous": 1, "price": 1, "princip": 1, "print": 0, "privat": 1, "pro": 1, "product": 0, "profil": 1, "profiles_directori": 1, "program": 1, "programmat": 0, "project": 1, "project_directori": 1, "properti": [0, 1], "provid": 1, "provis": 1, "public": 1, "pull": 1, "purpos": 1, "putobjectacl": 1, "py": 1, "pypa": 1, "pypi": 1, "python": [0, 1, 2], "python_fil": 1, "python_wheel_task": 1, "pythonpypilibrari": 1, "pythonwheeltask": 1, "qualifi": 0, "quartz": 1, "quartz_cron_express": 1, "queri": 1, "query_id": 1, "queue": 1, "queueset": 1, "r": 1, "r3": 1, "rang": 1, "rather": 1, "rcranlibrari": 1, "read": 1, "receiv": 1, "recipi": 1, "recommend": 1, "recurs": 0, "refer": [0, 1], "referenc": 1, "reflect": 1, "refresh": 1, "regardless": 1, "region": 1, "rel": [0, 1], "relat": 1, "releas": 1, "relev": 0, "reliabl": 1, "remaind": 1, "remot": 1, "remov": 1, "replac": 0, "repo": 1, "report": 0, "repositori": 1, "represent": 1, "request": 1, "requir": 1, "resent": 1, "reserv": 1, "reset": 1, "resid": 1, "resiz": 1, "resolv": [0, 1], "resolve_vari": 0, "resolve_variable_list": 0, "resourc": [0, 1], "resource_nam": 0, "resource_typ": 0, "resourcemut": 0, "respect": 1, "respond": 1, "restart": 1, "result_st": 1, "retri": 1, "retriev": 1, "retry_on_timeout": 1, "return": 0, "reus": 1, "right": 1, "role": 1, "room": 1, "root": 1, "rule": 1, "run": 1, "run_a": 1, "run_duration_second": 1, "run_if": 1, "run_job_task": 1, "runif": 1, "runjobtask": 1, "runnow": 1, "runtim": [0, 1], "runtime_engin": 1, "runtimeengin": 1, "s3": 1, "s3storageinfo": 1, "safeti": 1, "same": [0, 1], "satisfi": 1, "scala2": [0, 1], "scale": 1, "schedul": 1, "schema": 1, "schema_nam": 1, "scratch": 1, "script": 1, "search": 1, "second": 1, "secur": 1, "see": [0, 1], "select": 0, "self": [0, 1], "send": 1, "sent": 1, "sequenti": 1, "seri": 1, "serial": 1, "server": 1, "serverless": 1, "servic": 1, "service_principal_nam": 1, "serviceprincip": 1, "set": 1, "sever": 0, "sh": 1, "share": 1, "short": 0, "should": [0, 1], "shown": 1, "shuffl": 1, "side": 1, "similarli": 1, "simplejson": 1, "sinc": 1, "singl": [0, 1], "single_us": 1, "single_user_nam": 1, "size": 1, "skip": 1, "slash": 1, "slf4j": 1, "smaller": 1, "snapshot": 1, "so": 1, "some": 1, "soon": 1, "sourc": [0, 1], "space": 1, "spark": 1, "spark_conf": 1, "spark_daemon_java_opt": 1, "spark_env_var": 1, "spark_info": 1, "spark_jar_task": 1, "spark_local_dir": 1, "spark_python_task": 1, "spark_submit_task": 1, "spark_vers": 1, "spark_worker_memori": 1, "sparkcontext": 1, "sparkjartask": 1, "sparkpythontask": 1, "sparksubmittask": 1, "sparkvers": 1, "spec": 1, "specif": 1, "specifi": [0, 1], "spot": 1, "spot_azur": 1, "spot_bid_max_pric": 1, "spot_bid_price_perc": 1, "spot_with_fallback": 1, "spot_with_fallback_azur": 1, "spread": 1, "sql": [0, 1], "sql_task": 1, "sqltask": 1, "sqltaskalert": 1, "sqltaskdashboard": 1, "sqltaskfil": 1, "sqltaskqueri": 1, "sqltasksubscript": 1, "ssd": 1, "sse": 1, "ssh": 1, "ssh_public_kei": 1, "stabl": 1, "stage": 0, "standard": 1, "start": 1, "state": 1, "static": 0, "stop": 1, "storag": 1, "store": 1, "str": [0, 1], "stream": 1, "streaming_backlog_byt": 1, "streaming_backlog_fil": 1, "streaming_backlog_record": 1, "streaming_backlog_second": 1, "strictli": 1, "string": 1, "structur": 0, "style": 1, "subject": 1, "submit": 1, "submodul": 0, "subpath": 1, "subscrib": 1, "subscript": 1, "subsequ": 1, "subset": 1, "succeed": 1, "success": 1, "successfulli": 1, "suggest": 1, "summari": 0, "support": [0, 
1, 2], "suppos": 1, "sure": 1, "syntax": 1, "system": 1, "t": [0, 1], "tabl": 1, "table_nam": 1, "table_upd": 1, "tableupdatetriggerconfigur": 1, "tag": 1, "take": 1, "taken": 1, "target": [0, 1], "task": [0, 1], "task_kei": 1, "taskdepend": 1, "taskemailnotif": 1, "tasknotificationset": 1, "term": 1, "termin": 1, "test": [0, 1], "than": 1, "thei": [0, 1], "them": 1, "thi": [0, 1], "threshold": 1, "through": 1, "throughput": 1, "throughput_optimized_hdd": 1, "thrown": 1, "time": 1, "timed_out": 1, "timeout": 1, "timeout_second": 1, "timezon": 1, "timezone_id": 1, "top": 1, "total": 1, "treat": 0, "trigger": 1, "triggerset": 1, "true": [0, 1], "try": 1, "tupl": 0, "twice": 1, "two": 1, "txt": 1, "type": [0, 1], "typevar": 0, "u": 1, "ubuntu": 1, "uc": 1, "ui": 1, "unavail": 1, "underl": 0, "underutil": 1, "uniqu": [0, 1], "unit": [0, 1], "uniti": 1, "unless": 1, "unpaus": 1, "unset": 1, "unspecifi": 1, "unsuccess": 1, "unsuccessfulli": 1, "until": 1, "up": 1, "updat": 1, "uri": 1, "url": 1, "us": [0, 1], "usag": 1, "use_ml_runtim": 1, "use_preemptible_executor": 1, "used_commit": 1, "user": [0, 1], "user1": 1, "user_isol": 1, "user_nam": 1, "usernam": 1, "utc": 1, "utf": 1, "util": 1, "valid": [0, 1], "valu": [0, 1], "var": 0, "variabl": [0, 1], "variant": 0, "vc": 1, "version": [0, 1], "versu": 1, "via": 1, "vm": 1, "volum": 1, "volumesstorageinfo": 1, "wa": [0, 1], "wai": 1, "wait": 1, "wait_after_last_change_second": 1, "want": 1, "warehous": [0, 1], "warehouse_id": [0, 1], "warn": 0, "we": [0, 1], "web": 1, "webhook": 1, "webhook_notif": 1, "webhooknotif": 1, "week": 1, "well": 1, "west": 1, "what": 1, "wheel": 1, "when": [0, 1], "where": [0, 1], "wherea": 1, "whether": 1, "which": 1, "while": 1, "whl": 1, "who": 1, "whose": 1, "widget": 1, "window": 1, "within": [0, 1], "without": 1, "won": 1, "work": 1, "worker": 1, "workload": 1, "workload_typ": 1, "workloadtyp": 1, "workspac": 1, "workspacestorageinfo": 1, "write": 1, "wsf": 1, "x": [0, 1], "xlarg": 1, "y": 1, "yml": 0, "you": [0, 1], "your": [0, 1], "zero": 1, "zone": 1, "zone_id": 1}, "titles": ["Core", "Jobs", "databricks-bundles (Beta)"], "titleterms": {"beta": 2, "bundl": 2, "class": [0, 1], "core": 0, "databrick": 2, "decor": 0, "job": 1, "method": 0}})
\ No newline at end of file