Skip to content

Commit

Permalink
Merge pull request #434 from GoogleCloudPlatform/kfp-update
Browse files Browse the repository at this point in the history
KFP update
  • Loading branch information
takumiohym authored Apr 17, 2024
2 parents 32288c5 + efbe9e4 commit 948c904
Show file tree
Hide file tree
Showing 24 changed files with 468 additions and 231 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ steps:
args:
- '-c'
- |
dsl-compile-v2 # TODO
kfp dsl compile # TODO
env:
- 'PIPELINE_ROOT=gs://$PROJECT_ID-kfp-artifact-store/pipeline'
- 'PROJECT_ID=$PROJECT_ID'
Expand Down
4 changes: 2 additions & 2 deletions notebooks/kubeflow_pipelines/cicd/labs/kfp_cicd_vertex.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@
"\n",
"In the cell below, write a docker file that\n",
"* Uses `gcr.io/deeplearning-platform-release/base-cpu` as base image\n",
"* Install the python packages `kfp` with version `1.8.22 ` and `google-cloud-aiplatform` with version `1.43.0`\n",
"* Installs the python packages `kfp` with version `2.4.0`, `google-cloud-aiplatform` with version `1.43.0`, and `fire`\n",
"* Starts `/bin/bash` as entrypoint"
]
},
Expand Down Expand Up @@ -184,7 +184,7 @@
" args:\n",
" - '-c'\n",
" - |\n",
" dsl-compile-v2 # TODO\n",
" kfp dsl compile # TODO\n",
" env:\n",
" - 'PIPELINE_ROOT=gs://$PROJECT_ID-kfp-artifact-store/pipeline'\n",
" - 'PROJECT_ID=$PROJECT_ID'\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,11 @@
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Lightweight component training function."""
from kfp.v2.dsl import component
from kfp.dsl import component


@component(
base_image="python:3.8",
output_component_file="covertype_kfp_train_and_deploy.yaml",
packages_to_install=["google-cloud-aiplatform"],
)
def train_and_deploy(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,11 @@
"""Lightweight component tuning function."""
from typing import NamedTuple

from kfp.v2.dsl import component
from kfp.dsl import component


@component(
base_image="python:3.8",
output_component_file="covertype_kfp_tune_hyperparameters.yaml",
packages_to_install=["google-cloud-aiplatform"],
)
def tune_hyperparameters(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ steps:
args:
- '-c'
- |
dsl-compile-v2 --py pipeline.py --output covertype_kfp_pipeline.json
kfp dsl compile --py pipeline.py --output covertype_kfp_pipeline.yaml
env:
- 'PIPELINE_ROOT=gs://$PROJECT_ID-kfp-artifact-store/pipeline'
- 'PROJECT_ID=$PROJECT_ID'
Expand All @@ -49,7 +49,7 @@ steps:
args:
- '-c'
- |
python $_PIPELINE_FOLDER/kfp-cli_vertex/run_pipeline.py --project_id=$PROJECT_ID --template_path=$_PIPELINE_FOLDER/pipeline_vertex/covertype_kfp_pipeline.json --display_name=coverype_kfp_pipeline --region=$_REGION
python $_PIPELINE_FOLDER/kfp-cli_vertex/run_pipeline.py --project_id=$PROJECT_ID --template_path=$_PIPELINE_FOLDER/pipeline_vertex/covertype_kfp_pipeline.yaml --display_name=coverype_kfp_pipeline --region=$_REGION
# Push the images to Artifact Registry
images: ['us-docker.pkg.dev/$PROJECT_ID/asl-artifact-repo/trainer_image_covertype_vertex:latest']
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
FROM gcr.io/deeplearning-platform-release/base-cpu
RUN pip install kfp==1.8.22
RUN pip install google-cloud-aiplatform==1.43.0
RUN pip install kfp==2.4.0 google-cloud-aiplatform==1.43.0 fire
ENTRYPOINT ["/bin/bash"]
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,11 @@
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Lightweight component training function."""
from kfp.v2.dsl import component
from kfp.dsl import component


@component(
base_image="python:3.8",
output_component_file="covertype_kfp_train_and_deploy.yaml",
packages_to_install=["google-cloud-aiplatform"],
)
def train_and_deploy(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,12 +14,11 @@
"""Lightweight component tuning function."""
from typing import NamedTuple

from kfp.v2.dsl import component
from kfp.dsl import component


@component(
base_image="python:3.8",
output_component_file="covertype_kfp_tune_hyperparameters.yaml",
packages_to_install=["google-cloud-aiplatform"],
)
def tune_hyperparameters(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,13 +126,16 @@
"\n",
"import os\n",
"\n",
"from google_cloud_pipeline_components.aiplatform import (\n",
"from google_cloud_pipeline_components.v1.automl.training_job import (\n",
" AutoMLTabularTrainingJobRunOp,\n",
")\n",
"from google_cloud_pipeline_components.v1.dataset import TabularDatasetCreateOp\n",
"from google_cloud_pipeline_components.v1.endpoint import (\n",
" EndpointCreateOp,\n",
" ModelDeployOp,\n",
" TabularDatasetCreateOp,\n",
")\n",
"from kfp.v2 import dsl\n",
"\n",
"from kfp import dsl\n",
"\n",
"PIPELINE_ROOT = os.getenv(\"PIPELINE_ROOT\")\n",
"PROJECT = os.getenv(\"PROJECT\")\n",
Expand Down Expand Up @@ -224,7 +227,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"We compile the pipeline from the Python file we generated into a JSON description using the following command:"
"We compile the pipeline from the Python file we generated into a YAML description using the following command:"
]
},
{
Expand All @@ -233,7 +236,7 @@
"metadata": {},
"outputs": [],
"source": [
"PIPELINE_JSON = \"covertype_automl_vertex_pipeline.json\""
"PIPELINE_YAML = \"covertype_automl_vertex_pipeline.yaml\""
]
},
{
Expand All @@ -242,7 +245,7 @@
"source": [
"### Exercise\n",
"\n",
"Compile the pipeline with the `dsl-compile-v2` command line:"
"Compile the pipeline with the `kfp dsl compile` command line:"
]
},
{
Expand All @@ -261,11 +264,11 @@
"**Note:** You can also use the Python SDK to compile the pipeline:\n",
"\n",
"```python\n",
"from kfp.v2 import compiler\n",
"from kfp import compiler\n",
"\n",
"compiler.Compiler().compile(\n",
" pipeline_func=create_pipeline, \n",
" package_path=PIPELINE_JSON,\n",
" package_path=PIPELINE_YAML,\n",
")\n",
"\n",
"```"
Expand All @@ -284,7 +287,7 @@
"metadata": {},
"outputs": [],
"source": [
"!head {PIPELINE_JSON}"
"!head {PIPELINE_YAML}"
]
},
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -329,7 +329,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"We compile the pipeline from the Python file we generated into a JSON description using the following command:"
"We compile the pipeline from the Python file we generated into a YAML description using the following command:"
]
},
{
Expand All @@ -338,7 +338,7 @@
"metadata": {},
"outputs": [],
"source": [
"PIPELINE_JSON = \"covertype_kfp_pipeline.json\""
"PIPELINE_YAML = \"covertype_kfp_pipeline.yaml\""
]
},
{
Expand All @@ -347,7 +347,7 @@
"source": [
"### Exercise\n",
"\n",
"Compile the `pipeline_vertex/pipeline.py` with the `dsl-compile-v2` command line:"
"Compile the `pipeline_vertex/pipeline.py` with the `kfp dsl compile` command line:"
]
},
{
Expand All @@ -368,7 +368,7 @@
"```python\n",
"compiler.Compiler().compile(\n",
" pipeline_func=covertype_train, \n",
" package_path=PIPELINE_JSON,\n",
" package_path=PIPELINE_YAML,\n",
")\n",
"\n",
"```"
Expand All @@ -387,7 +387,7 @@
"metadata": {},
"outputs": [],
"source": [
"!head {PIPELINE_JSON}"
"!head {PIPELINE_YAML}"
]
},
{
Expand Down Expand Up @@ -415,6 +415,51 @@
"# TODO"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### (Optional) Compile the custom components\n",
"If you want to easily reuse your custom components in other pipelines, consider compiling them into YAML format. <br>\n",
"Be aware that component YAML and pipeline YAML represent distinct objects."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from kfp import compiler, components\n",
"from pipeline_vertex.training_lightweight_component import train_and_deploy\n",
"from pipeline_vertex.tuning_lightweight_component import tune_hyperparameters\n",
"\n",
"compiler.Compiler().compile(\n",
" train_and_deploy, \"covertype_kfp_train_and_deploy.yaml\"\n",
")\n",
"compiler.Compiler().compile(\n",
" tune_hyperparameters, \"covertype_kfp_tune_hyperparameters.yaml\"\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"To reuse a compiled component in another pipeline, easily load it using `kfp.components.load_component_from_file(<YAML PATH>)`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"custom_component = components.load_component_from_file(\n",
" \"covertype_kfp_train_and_deploy.yaml\"\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
Expand Down
Loading

0 comments on commit 948c904

Please sign in to comment.