From 91c808097ee9cf768c6d23156807f0775c5bd716 Mon Sep 17 00:00:00 2001 From: Roshan Vijayan Date: Fri, 10 Jan 2025 19:32:15 +0530 Subject: [PATCH] feat: IBM Connectivity Pack Kafka Connectors 1.0.0 (#1) * feat: IBM Connectivity Pack Kafka Connectors 1.0.0 IBM Connectivity Pack Kafka Connectors 1.0.0 Signed-off-by: Roshan Vijayan Co-authored-by: Varada Sunanda --- .github/ISSUE_TEMPLATE/BUG-REPORT.yml | 76 ++++ .github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml | 41 ++ .github/PULL_REQUEST_TEMPLATE | 32 ++ .github/workflows/github-build-release.yml | 43 ++ .github/workflows/verify-commits.yml | 27 ++ .gitignore | 8 + LICENSE | 202 +++++++++ README.md | 218 +++++++++- connectors/source-connector.md | 109 +++++ examples/kafka-connect.yaml | 84 ++++ examples/kafka-connector-source.yaml | 71 ++++ ibm-connectivity-pack/.helmignore | 24 ++ ibm-connectivity-pack/Chart.yaml | 22 + ibm-connectivity-pack/LICENSE | 202 +++++++++ ibm-connectivity-pack/README.md | 164 ++++++++ ibm-connectivity-pack/connector-config.json | 19 + .../templates/InstallEAPreHook.yaml | 32 ++ .../templates/InstallPreHook.yaml | 71 ++++ ibm-connectivity-pack/templates/NOTES.txt | 10 + ibm-connectivity-pack/templates/_helpers.tpl | 294 +++++++++++++ .../templates/_helpersEA.tpl | 107 +++++ ibm-connectivity-pack/templates/creds.yaml | 15 + ibm-connectivity-pack/templates/csConfig.yaml | 11 + .../templates/deployment.yaml | 396 ++++++++++++++++++ ibm-connectivity-pack/templates/earole.yaml | 25 ++ .../templates/earolebinding.yaml | 20 + .../templates/easerviceaccount.yaml | 11 + .../templates/envConfigMap.yaml | 13 + ibm-connectivity-pack/templates/hpa.yaml | 44 ++ .../templates/imagePullSecret.yaml | 15 + .../templates/installEAPostHook.yaml | 77 ++++ ibm-connectivity-pack/templates/proxy.yaml | 84 ++++ ibm-connectivity-pack/templates/role.yaml | 81 ++++ .../templates/rolebinding.yaml | 36 ++ ibm-connectivity-pack/templates/route.yaml | 22 + ibm-connectivity-pack/templates/service.yaml | 38 ++ 
.../templates/serviceaccount.yaml | 26 ++ .../templates/tokenStore.yaml | 13 + .../templates/uninstallPostHook.yaml | 38 ++ ibm-connectivity-pack/values.yaml | 99 +++++ systems/salesforce.md | 146 +++++++ 41 files changed, 3065 insertions(+), 1 deletion(-) create mode 100644 .github/ISSUE_TEMPLATE/BUG-REPORT.yml create mode 100644 .github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml create mode 100644 .github/PULL_REQUEST_TEMPLATE create mode 100644 .github/workflows/github-build-release.yml create mode 100644 .github/workflows/verify-commits.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 connectors/source-connector.md create mode 100644 examples/kafka-connect.yaml create mode 100644 examples/kafka-connector-source.yaml create mode 100644 ibm-connectivity-pack/.helmignore create mode 100644 ibm-connectivity-pack/Chart.yaml create mode 100644 ibm-connectivity-pack/LICENSE create mode 100644 ibm-connectivity-pack/README.md create mode 100644 ibm-connectivity-pack/connector-config.json create mode 100644 ibm-connectivity-pack/templates/InstallEAPreHook.yaml create mode 100644 ibm-connectivity-pack/templates/InstallPreHook.yaml create mode 100644 ibm-connectivity-pack/templates/NOTES.txt create mode 100644 ibm-connectivity-pack/templates/_helpers.tpl create mode 100644 ibm-connectivity-pack/templates/_helpersEA.tpl create mode 100644 ibm-connectivity-pack/templates/creds.yaml create mode 100644 ibm-connectivity-pack/templates/csConfig.yaml create mode 100644 ibm-connectivity-pack/templates/deployment.yaml create mode 100644 ibm-connectivity-pack/templates/earole.yaml create mode 100644 ibm-connectivity-pack/templates/earolebinding.yaml create mode 100644 ibm-connectivity-pack/templates/easerviceaccount.yaml create mode 100644 ibm-connectivity-pack/templates/envConfigMap.yaml create mode 100644 ibm-connectivity-pack/templates/hpa.yaml create mode 100644 ibm-connectivity-pack/templates/imagePullSecret.yaml create mode 100644 
ibm-connectivity-pack/templates/installEAPostHook.yaml create mode 100644 ibm-connectivity-pack/templates/proxy.yaml create mode 100644 ibm-connectivity-pack/templates/role.yaml create mode 100644 ibm-connectivity-pack/templates/rolebinding.yaml create mode 100644 ibm-connectivity-pack/templates/route.yaml create mode 100644 ibm-connectivity-pack/templates/service.yaml create mode 100644 ibm-connectivity-pack/templates/serviceaccount.yaml create mode 100644 ibm-connectivity-pack/templates/tokenStore.yaml create mode 100644 ibm-connectivity-pack/templates/uninstallPostHook.yaml create mode 100644 ibm-connectivity-pack/values.yaml create mode 100644 systems/salesforce.md diff --git a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml new file mode 100644 index 0000000..9cb3871 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml @@ -0,0 +1,76 @@ +name: "🐛 Bug Report" +description: File a bug report +title: "🐛 [BUG] - " +labels: ["bug"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: textarea + id: what-happened + attributes: + label: What happened? + description: Also tell us, what did you expect to happen? + placeholder: Tell us what you see! + value: "A bug happened!" + validations: + required: true + - type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior. + placeholder: | + 1. In this environment... + 2. With this config... + 3. Run '...' + 4. See error... + validations: + required: false + - type: textarea + attributes: + label: Environment + description: | + examples: + - **OS**: Ubuntu 20.04 + - **Openshift**: 4.8.2 + - **Kubernetes**: v1.23.12+8a6bfe4 + value: | + - OS: + - Openshift: + - Kubernetes: + render: markdown + validations: + required: false + - type: textarea + attributes: + label: Anything else? + description: | + Links? References? 
Anything that will give us more context about the issue you are encountering! + + Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. + validations: + required: false + - type: dropdown + id: version + attributes: + label: Version + description: What version of our software are you running? + options: + - 1.0.0 (Default) + validations: + required: true + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. + render: shell + - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](../CODE_OF_CONDUCT.md) + options: + - label: I agree to follow this project's Code of Conduct + required: true diff --git a/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml new file mode 100644 index 0000000..2088e73 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml @@ -0,0 +1,41 @@ +name: "💡 Feature Request" +description: Create a new ticket for a new feature request +title: "💡 [Feature] - <title>" +labels: + - "feature_request" +body: + - type: markdown + attributes: + value: | + **Thanks :heart: for taking the time to fill out this feature request report!** + We kindly ask that you search to see if an issue [already exists](https://github.com/ibm-messaging/connectivity-pack-kafka-connectors/issues?q=is%3Aissue+sort%3Acreated-desc+) for your feature. + We are also happy to accept contributions from our users. For more details see [here](https://github.com/ibm-messaging/connectivity-pack-kafka-connectors/blob/main/CONTRIBUTING.md). + - type: textarea + attributes: + label: Description + description: | + A clear and concise description of the feature you're interested in. 
+ validations: + required: true + - type: textarea + attributes: + label: Suggested Solution + description: | + Describe the solution you'd like. A clear and concise description of what you want to happen. + validations: + required: true + - type: textarea + attributes: + label: Alternatives + description: | + Describe alternatives you've considered. + A clear and concise description of any alternative solutions or features you've considered. + validations: + required: false + - type: textarea + attributes: + label: Additional Context + description: | + Add any other context about the problem here. + validations: + required: false diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE new file mode 100644 index 0000000..d35201d --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE @@ -0,0 +1,32 @@ +# Description + +<!-- Please include a summary of the changes and the related issue. Please also include relevant motivation and context. List any dependencies that are required for this change. --> + +Fixes # (issue) + +## Type of change + +Please delete options that are not relevant. + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] This change requires a documentation update + +## How Has This Been Tested? + +<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. 
Please also list any relevant details for your test configuration + +- [ ] Test A +- [ ] Test B --> + +## Checklist + +- [ ] My code follows the style guidelines of this project +- [ ] I have performed a self-review of my code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] Any dependent changes have been merged and published in downstream modules diff --git a/.github/workflows/github-build-release.yml b/.github/workflows/github-build-release.yml new file mode 100644 index 0000000..b161e27 --- /dev/null +++ b/.github/workflows/github-build-release.yml @@ -0,0 +1,43 @@ +name: Upload HelmChart To Release +on: + release: + types: [created] +jobs: + build: + name: Build and Upload helm Chart + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Read Helm Chart + id: chart + uses: jacobtomlinson/gha-read-helm-chart@master + with: + path: ibm-connectivity-pack + - name: Print Chart Details and set Chart Version + run: | + echo "Name - ${{ steps.chart.outputs.name }}" + echo "Version - ${{ steps.chart.outputs.version }}" + echo "App Version - ${{ steps.chart.outputs.appVersion }}" + echo "CHART_VERSION=${{ steps.chart.outputs.version }}" >> $GITHUB_ENV + GIT_REF_TAG=${{ github.ref }} + echo "SOURCE_TAG=${GIT_REF_TAG#refs/tags/}" >> $GITHUB_ENV + - name: Package Helm Chart + run: tar -czvf ibm-connectivity-pack-${{env.CHART_VERSION}}.tgz -C ibm-connectivity-pack . 
+ - name: Get Release by Tag Name + id: get_release + uses: joutvhu/get-release@v1 + with: + tag_name: ${{env.SOURCE_TAG}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Release Asset Helm Chart + id: upload-release-asset-helm-chart + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.get_release.outputs.upload_url }} + asset_path: ./ibm-connectivity-pack-${{env.CHART_VERSION}}.tgz + asset_name: ibm-connectivity-pack-${{env.CHART_VERSION}}.tgz + asset_content_type: application/tgz diff --git a/.github/workflows/verify-commits.yml b/.github/workflows/verify-commits.yml new file mode 100644 index 0000000..90ee83f --- /dev/null +++ b/.github/workflows/verify-commits.yml @@ -0,0 +1,27 @@ +name: Checkstyle and Tests + +on: + pull_request: + branches: + - 'main' + types: [opened, synchronize, reopened] + +jobs: + verify-commits: + name: Verify Commits + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + ref: ${{ github.event.pull_request.head.sha }} + fetch-depth: 0 + + - name: Verify Signed-off-by + run: | + for commit in $(git rev-list --no-merges HEAD^..HEAD); do + if ! git log -1 --format=%B "$commit" | grep -q "^Signed-off-by: "; then + echo "Commit $commit is missing Signed-off-by line." + exit 1 + fi + done \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..fc79fbc --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +.idea +.DS_Store +.git_askpass +build +published_images.txt +pr_link.txt +EA_README.md +ibm-connectivity-pack/.helmignore diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..d63a0f0 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + © Copyright IBM Corporation. 2017, 2018 + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md index bc63d36..8447bb0 100644 --- a/README.md +++ b/README.md @@ -1 +1,217 @@ -# connectivity-pack-kafka-connectors \ No newline at end of file +# Connectivity Pack Kafka connectors + +By using the IBM Connectivity Pack, Connectivity Pack Kafka connectors enable data streaming between external systems and Kafka. 
+ +## Contents + +- [Prerequisites](#prerequisites) +- [Installing the IBM Connectivity Pack](#installing-the-ibm-connectivity-pack) +- [Starting Kafka Connect](#starting-kafka-connect) +- [Running the connectors](#running-the-connectors) +- [License](#license) + +## Prerequisites + +To run Connectivity Pack Kafka connectors, ensure you have: + +- IBM Event Streams installed, and you have the bootstrap address, an image pull secret called [`ibm-entitlement-key`](https://ibm.github.io/event-automation/es/installing/installing/#creating-an-image-pull-secret), certificates, and credentials required to access Kafka. +- The external system (for example, Salesforce) configured according to the [system-specific guidance](./systems/), with the required URLs and credentials to access the system. + + For information about the supported systems, see the [systems](./systems/) folder. + +- Either enabled [auto-creation of Kafka topics](https://ibm.github.io/event-automation/es/connecting/setting-up-connectors/#enabling-topic-creation-for-connectors) or pre-created all the required Kafka topics in the format that must be specified in the `connectivitypack.topic.name.format` section of the [`KafkaConnector` custom resource](#running-the-connectors). + + +## Installing the IBM Connectivity Pack + +The Connectivity Pack acts as an interface between Kafka Connect connectors and external systems you want to connect to. It can be deployed on OpenShift and other Kubernetes platforms by using the Connectivity Pack Helm chart. + +To install the Connectivity Pack, run the following command: + +```bash +helm install <RELEASE-NAME> <CONNECTIVITY-PACK-HELM-CHART-URL> --set license.licenseId=<LICENSE-ID>,license.accept=true -n <NAMESPACE> +``` + +Where: + +- `<RELEASE-NAME>` is the release name of your choice. For example, `ibm-connectivity-pack` +- `<CONNECTIVITY-PACK-HELM-CHART-URL>` is the URL of the latest version of the Connectivity Pack Helm chart. 
For example: `https://github.com/ibm-messaging/connectivity-pack-kafka-connectors/releases/download/1.0.0/ibm-connectivity-pack-1.0.0.tgz` +- `license.licenseId=<LICENSE-ID>` is the license identifier (ID) for the program that you purchased. For more information, see [licensing reference](https://ibm.github.io/event-automation/support/licensing/). +- `license.accept` determines whether the license is accepted (default is `false` if not specified). +- `<NAMESPACE>` is the namespace where you want to install the Connectivity Pack. This must be in the same namespace where an Event Streams instance is deployed. + +You can override the default configuration parameters by using the `--set` flag or by using a custom YAML file. For example, to set the `replicaCount` as `3`, you can use `--set replicaCount=3`. + +For more information about installing the Connectivity Pack, including a complete list of configuration parameters supported by the Helm chart, see [installing the Connectivity Pack](./ibm-connectivity-pack/README.md#configuring). + +## Starting Kafka Connect + +Configure the Kafka Connect runtime and include the configuration, certificates, and connectors for the Connectivity Pack by following these instructions. + +**Note:** For more information, see [setting up connectors](https://ibm.github.io/event-automation/es/connecting/setting-up-connectors/). + +1. Create a `KafkaConnect` custom resource to define the Kafka Connect runtime. An example custom resource is available in the [`examples`](/examples/kafka-connect.yaml) folder. You can edit the example custom resource file to meet on your requirements and to configure the following settings. 
+ + - To use the pre-built connector JAR file, set the URL of the [latest release asset](https://public.dhe.ibm.com/ibmdl/export/pub/software/websphere/messaging/eventstreams/connectors/connectivitypack/) as the value for `spec.build.plugins[].artifacts[].url` as shown in the following example: + + ```yaml + plugins: + - name: connectivitypack-source-connector + artifacts: + - type: jar + url: https://public.dhe.ibm.com/ibmdl/export/pub/software/websphere/messaging/eventstreams/connectors/connectivitypack/<VERSION>/connectivity-pack-source-connector-<VERSION>-jar-with-dependencies.jar + ``` + + Where `<VERSION>` is the version of the Connectivity Pack connector JAR file. + + - To use the installed Connectivity Pack with `<RELEASE-NAME>`, update the `spec.template` section with the following configuration and certificates: + + - If you are using Event Streams version 11.5.2 and later, use the following configuration: + + ```yaml + template: + connectContainer: + env: + - name: CONNECTIVITYPACK_SERVICE_URL + valueFrom: + configMapKeyRef: + key: CONNECTIVITYPACK_SERVICE_URL + name: <RELEASE-NAME>-config + volumeMounts: + - mountPath: /mnt/connectivitypack/certificates + name: connectivitypack-certs + pod: + volumes: + - name: connectivitypack-certs + secret: + secretName: <RELEASE-NAME>-client-certificate + ``` + + - If you are using earlier versions of Event Streams than 11.5.2, use the following `externalConfiguration`: + + ```yaml + externalConfiguration: + env: + - name: CONNECTIVITYPACK_SERVICE_URL + valueFrom: + configMapKeyRef: + key: CONNECTIVITYPACK_SERVICE_URL + name: <RELEASE-NAME>-config + volumes: + - name: connectivitypack-certs + secret: + secretName: <RELEASE-NAME>-client-certificate + ``` + + Where `<RELEASE-NAME>` is the Helm release name for your Connectivity Pack installation. 
+ + - The following section explains the environment variables that are used in the Kafka Connect configuration: + + - **`CONNECTIVITYPACK_SERVICE_URL`**: The URL of the Connectivity Pack that the Kafka connector uses to connect. For example: + + ```yaml + CONNECTIVITYPACK_SERVICE_URL: <connectivity-service-url> + ``` + + You can set this URL by using a `configMapKeyRef` that points to the ConfigMap of the Connectivity Pack or directly set it to the correct endpoint. Ensure that the URL is accessible from the Kafka Connect container. + + - **`CONNECTIVITYPACK_CERTS_PATH`**: The file path to the directory containing the certificates required for secure communication. This includes the client certificate, private key, and any intermediate certificates that are required for secure communication between the connector and the Connectivity Pack. For example: + + ```yaml + CONNECTIVITYPACK_CERTS_PATH: /mnt/connectivitypack/certificates + ``` + + By default, this is set to `/mnt/connectivitypack/certificates`. You can optionally specify this environment variable if your certificates are mounted at a different path. + + - **`CONNECTIVITYPACK_PKCS12_PASSWORD`**: The password that is used to access the PKCS12 certificate store. This environment variable is required for secure communication between the connector and the Connectivity Pack only if the PKCS12 file is password-protected. If the PKCS12 file does not have a password, you can set this as an empty string or skip configuring this environment variable. For example: + + ```yaml + template: + connectContainer: + env: + CONNECTIVITYPACK_PKCS12_PASSWORD: <your-pkcs12-password> + ``` + + + +1. Apply the configured `KafkaConnect` custom resource by using the `kubectl apply` command to start the Kafka Connect runtime. + +1. When Kafka Connect is successfully created, verify that the connector is available for use by checking the `status.connectorPlugins` section in the `KafkaConnect` custom resource. 
For the Connectivity Pack source connector to work, the following plug-in must be present: + + ```yaml + status: + connectorPlugins: + - class: com.ibm.eventstreams.connect.connectivitypacksource.ConnectivityPackSourceConnector + type: source + version: <version> + ``` + + +## Running the Connectors + +Configure your connector with information about your external system by following these instructions. + +**Note:** For more information, see [setting up connectors](https://ibm.github.io/event-automation/es/connecting/setting-up-connectors/#set-up-a-kafka-connector). + +1. Create a `KafkaConnector` custom resource to define your connector configuration. An example custom resource is available in the [`examples`](/examples/kafka-connector-source.yaml) folder. You can edit the custom resource file based on your requirements. + +1. Specify `com.ibm.eventstreams.connect.connectivitypacksource.ConnectivityPackSourceConnector` as the connector class name. + +1. Configure the connector properties such as the external system you want to connect to, objects, authentication, and data mapping in the `config` section as described in the [source connector documentation](./connectors/source-connector.md#configuration). You can find the supported values for your source system in the [system-specific guidance](./systems/salesforce.md#). The following is an example of a connector configuration for Salesforce: + + ```yaml + config: + # Which source system to connect to, for example, salesforce + connectivitypack.source: salesforce + + # URL to access the source system, for example, `https://<your-instance-name>.salesforce.com` + connectivitypack.source.url: <URL-of-the-data-source-instance>> + + # Credentials to access the source system using BASIC_OAUTH authentication. 
+ connectivitypack.source.credentials.authType: BASIC_OAUTH + connectivitypack.source.credentials.accessTokenBasicOauth: <access-token> + connectivitypack.source.credentials.refreshTokenBasicOauth: <refresh-token> + connectivitypack.source.credentials.clientSecret: <client-secret> + connectivitypack.source.credentials.clientIdentity: <client-identity> + + # Objects and event-types to read from the datasource + connectivitypack.source.objects: '<object1>,<object2>,[<object3>]' + connectivitypack.source.<object1>.events: 'CREATED' + connectivitypack.source.<object2>.events: 'CREATED,UPDATED' + + # Optional, sets the format for Kafka topic names created by the connector. + # You can use placeholders like '${object}' and '${eventType}', which the connector will replace automatically. + # Including '${object}' or '${eventType}' in the format is optional. For example, '${object}-topic-name' is a valid format. + # By default, the format is '${object}-${eventType}', but it's shown here for clarity. + connectivitypack.topic.name.format: '${object}-${eventType}' + + # Specifies the converter class used to deserialize the message value. + # Change this to a different converter (for example, AvroConverter) if needed. + value.converter: org.apache.kafka.connect.json.JsonConverter + + # Controls whether the schema is included in the message. + # Set this to false to disable schema support, or to true to enable schema inclusion (for example, for Avro). + value.converter.schemas.enable: false + ``` + +1. Apply the configured `KafkaConnector` custom resource by using the `kubectl apply` command to start the connector. +1. Verify that the connector is running by checking the `status` section in the `KafkaConnector` custom resource: + + ```yaml + Status: + Conditions: + Last Transition Time: 2024-07-13T07:56:40.943007974Z + Status: True + Type: Ready + Connector Status: + Connector: + State: RUNNING + ``` + + +## License + +Copyright IBM Corp. 
2024 + +IBM Connectivity Pack is licensed under the [IBM Event Automation license and IBM Cloud Pak for Integration license](https://ibm.biz/ea-license), while the Helm chart and documentation are licensed under the [Apache License, Version 2.0](./ibm-connectivity-pack/LICENSE). + diff --git a/connectors/source-connector.md b/connectors/source-connector.md new file mode 100644 index 0000000..4b6d068 --- /dev/null +++ b/connectors/source-connector.md @@ -0,0 +1,109 @@ +# Connectivity Pack source connector + +The Connectivity Pack source connector enables streaming data from source systems, such as Salesforce, into Kafka topics. These [Kafka Connect](http://kafka.apache.org/documentation.html#connect) connectors use the [IBM Connectivity Pack](../ibm-connectivity-pack/README.md) to enable the data flow between the source system and Kafka. + +The connector can be configured to stream the required data by specifying the source system, and a list of objects and associated events that are to be streamed. + +The connector uses a Connectivity Pack instance as a bridge that retrieves events from the source system and sends them to the connector for publishing to Kafka topics. + +## Configuration + +The following configuration options are supported and must be configured in the `config` section of the `KafkaConnector` custom resource. + +**Note:** See the [system-specific guidance](../systems/) for supported values of your source system, such as [Salesforce](../systems/salesforce.md). + +### Source information + +| Property | Type | Description | Valid values | +| --- | --- | --- | --- | +| `connectivitypack.source` | `string` | Specifies the source system from which data is retrieved. | A valid source, for example, `salesforce` | +| `connectivitypack.source.url` | `string` | The base URL of the source system. 
| A valid source URL in the format `https://.salesforce.com` | + +### Authentication + +| Property | Type | Description | Valid values | +| --- | --- | --- | --- | +| `connectivitypack.source.credentials.authType` | `string` | Specifies the authentication type for the source system. | Supported types, for example, `OAUTH2_PASSWORD` or `BASIC_OAUTH` | +| `connectivitypack.source.credentials.username` | `string` | The username associated with the source system's credentials. Required for `OAUTH2_PASSWORD`. | The username used for authentication. | +| `connectivitypack.source.credentials.password` | `string` | The password associated with the source system's credentials. Required for `OAUTH2_PASSWORD`. | The password used for authentication. | +| `connectivitypack.source.credentials.clientIdentity` | `string` | The client identity of the system to which the source system is connected to. Required for both `OAUTH2_PASSWORD` and `BASIC_OAUTH`. | The client identity of the system to which the source system is connected to. | +| `connectivitypack.source.credentials.clientSecret` | `string` | The client secret of the source system. Required for both `OAUTH2_PASSWORD` and `BASIC_OAUTH`. | The client secret of the source system's connected app. | +| `connectivitypack.source.credentials.accessTokenBasicOauth` | `string` | The OAuth access token used for authentication. Required for `BASIC_OAUTH`. | A valid access token that complies with the source system's requirements. | +| `connectivitypack.source.credentials.refreshTokenBasicOauth` | `string` | The refresh token used to renew the OAuth access token. Required for `BASIC_OAUTH`. | A valid refresh token that complies with the source system's requirements. 
| + +### Data mapping + +| Property | Type | Description | Valid values | +| --------------------------------------------- | -------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | +| `connectivitypack.source.objects` | `string` or comma-separated list | Specifies the objects in the source system that the connector will interact with. You can provide either a single object, or a comma-separated list of objects. | Values depend on your source system. In Salesforce, valid values are any of the `Platform Event` or `Change Data Capture` supporting objects. | +| `connectivitypack.source.<object>.events` | `string` or comma-separated list | Specifies the events for each object that the connector listens to. Each object specified in `connectivitypack.source.objects` must have corresponding events. `<object>` must be replaced with one of the specified objects. | Events supported by each object on your source system. In Salesforce, valid events are `CREATED`, `UPDATED`, or `DELETED`. | +| `connectivitypack.topic.name.format` | `string` | Sets the format for Kafka topic names created by the connector. You can use placeholders such as `${object}` and `${eventType}`, which the connector will replace automatically. Including `${object}` or `${eventType}` in the format is optional. For example, `${object}-topic-name` is a valid format. A topic will be created for each `object-eventType` combination. 
| Default: `${object}-${eventType}` | +| `connectivitypack.auto.correct.invalid.topic` | `boolean` | Optional: Automatically converts invalid topic names to valid Kafka topic names by replacing unsupported characters. For example, the topic name `*topicname` will be converted to `-topicname` by replacing `*` with `-`. | `true` or `false` | + +## Task distribution + +The distribution of tasks in the Connectivity Pack source connector depends on the objects and the associated events in the source system that are sent to Kafka. Each `object - event` combination is handled by a separate connector task that publishes messages to a distinct Kafka topic. + +- **Object:** Represents a data entity or record in the source system, such as `Account`, `Order_Event__e`, or `CaseChangeEvent`. +- **Event:** Specifies the type of action or state change related to the object, such as `CREATED`, `UPDATED`, or `DELETED`. + +For example, if the object is an `Order_Event__e` record, and the related event is a `CREATED` action, the events triggered when a new `Order_Event__e` record is created in the source system will be handled by a connector task, and such events will be published to the `Order_Event__e-CREATED` topic. + +**Note:** If the value of `spec.tasksMax` is configured to be less than the number of `object - event` combinations, the connector will fail with the following error: + +```shell +The connector `<name-of-connector>` has generated `<actual-number-of-tasks>` tasks, which is greater than `<value-given-in-tasksMax>`, the maximum number of tasks it is configured to create. +``` + +## Single Message Transformations (SMTs) + +The Salesforce connector can be configured with [transformations](https://kafka.apache.org/documentation.html#connect_transforms) to manipulate Kafka record keys, values, and headers. With these Single Message Transformations (SMTs) you can extract, cast, or modify fields for better control over your streaming data. 
+ +To set the Kafka record key from a subset of fields in the source record, see the following example: + +1. Consider a record with the following structure: + + + ```json + { + "CreatedDate": "2024-12-11T11:00:32.299Z", + "CreatedById": "005dM000009CeUMQA0", + "OrderStatus__c": "B\\M*3G0\"q!", + "CustomerEmail__c": "username@hotmail.com", + "OrderTotal__c": 626.48, + "OrderDate__c": "2024-02-18T16:05:06.506Z", + "OrderId__c": "d76306e1-2ff8-4f8d-8bb0-ac91a1caf25b", + "schema": "cKLHIOLKGudADKzibwggwA", + "event": { + "replayId": "40270513", + "EventUuid": "db784749-8d7d-403e-9885-dfddb016391f" + } + } + ``` + + +1. To configure the `replayId` from the `event` object as the Kafka record key, you can define a series of transformations as follows: + + ```shell + transforms: 'createEvent,extractEvent,extractReplayId' + + # First Transform: Convert the entire 'event' object to the key + transforms.createEvent.type: org.apache.kafka.connect.transforms.ValueToKey + transforms.createEvent.fields: event + # Result: Sets the entire 'event' object as the record's key. + + # Second Transform: Extract the 'event' object from the key + transforms.extractEvent.type: org.apache.kafka.connect.transforms.ExtractField$Key + transforms.extractEvent.field: event + # Result: Replaces the record's key with the 'event' object extracted from it. + + # Third Transform: Extract 'replayId' from the key + transforms.extractReplayId.type: org.apache.kafka.connect.transforms.ExtractField$Key + transforms.extractReplayId.field: replayId + # Result: Replaces the key with the 'replayId' value extracted from the 'event' object. + ``` + + +1. After applying the SMTs, the Kafka record key is set to `"40270513"`, extracted from the `replayId` field in the `event` object. + + You can further modify or cast this key if required. For example, you can use an additional transformation to cast the key from a string into a number. 
diff --git a/examples/kafka-connect.yaml b/examples/kafka-connect.yaml new file mode 100644 index 0000000..b1e93ad --- /dev/null +++ b/examples/kafka-connect.yaml @@ -0,0 +1,84 @@ +apiVersion: eventstreams.ibm.com/v1beta2 +kind: KafkaConnect +metadata: + annotations: + eventstreams.ibm.com/use-connector-resources: "true" + name: <name> + namespace: <namespace> + labels: + backup.eventstreams.ibm.com/component: kafkaconnect +spec: + config: + exactly.once.source.support: ENABLED # To enable exactly-once delivery semantics + config.storage.topic: <groupid>-configs + group.id: <groupid> + offset.storage.topic: <groupid>-offsets + status.storage.topic: <groupid>-status + # To indicate the configurations (secrets) of connector is provided as K8s Secret + config.providers: file + config.providers.file.class: org.apache.kafka.common.config.provider.DirectoryConfigProvider + bootstrapServers: <bootstrapServers> # Replace with the bootstrap server address + resources: + limits: + cpu: 2000m + memory: 2Gi + authentication: + passwordSecret: + password: password + secretName: <kafka-user> # Replace with the name of the kafka user. + type: scram-sha-512 + username: <kafka-user> # Replace with the name of the kafka user. 
+ template: + connectContainer: + env: + - name: CONNECTIVITYPACK_SERVICE_URL # DO NOT change the name + valueFrom: + configMapKeyRef: + key: CONNECTIVITYPACK_SERVICE_URL + name: <ibm-connectivitypack-config-map-name> # Replace with the name of the connectivity pack's config map + volumeMounts: + - mountPath: /mnt/connectivitypack/certificates + name: connectivitypack-certs + - mountPath: /mnt/salesforce-credential + name: salesforce-credential + securityContext: + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + privileged: false + readOnlyRootFilesystem: true + runAsNonRoot: true + pod: + metadata: + annotations: + cloudpakId: c8b82d189e7545f0892db9ef2731b90d + productVersion: 11.5.1 + productID: 2a79e49111f44ec3acd89608e56138f5 + cloudpakName: IBM Cloud Pak for Integration + productChargedContainers: <name>-kafkaconnect-connect # Replace with the name of the kafka connect given under metadata + productCloudpakRatio: "2:1" + productName: IBM Event Streams for Non Production + eventstreams.production.type: CloudPakForIntegrationNonProduction + productMetric: VIRTUAL_PROCESSOR_CORE + volumes: + - name: connectivitypack-certs + secret: + secretName: <connectivitypack-name>-client-certificate # Replace with the name of the connectivity pack's client certificate secret + - name: salesforce-credential + secret: + secretName: <salesforce-credentials-name> # Replace with the name of the Salesforce credentials secret + tls: + trustedCertificates: + - certificate: ca.crt + secretName: <event-streams-instance-name>-cluster-ca-cert # Replace with the name of the Event Streams instance's CA certificate secret + build: + output: + image: <my-image-registry.my-kafka-connect-image:latest> # Replace with your image registry and image name + type: <image-type> # Specify the type of image (for example: docker) + plugins: + - artifacts: + - type: jar + url: <GitHub-URL-of-connectivity-pack-source-connector-<VERSION>-jar-with-dependencies.jar> # Replace with the URL 
to the connector jar file + name: connectivity-pack-source-connector + replicas: 1 diff --git a/examples/kafka-connector-source.yaml b/examples/kafka-connector-source.yaml new file mode 100644 index 0000000..49c4676 --- /dev/null +++ b/examples/kafka-connector-source.yaml @@ -0,0 +1,71 @@ +kind: Secret +apiVersion: v1 +metadata: + name: salesforce-credential + namespace: <namespace> +stringData: + username: <username> + password: <password> + clientIdentity: <clientIdentity> + clientSecret: <clientSecret> +type: Opaque + +--- + +apiVersion: eventstreams.ibm.com/v1beta2 +kind: KafkaConnector +metadata: + labels: + eventstreams.ibm.com/cluster: <name of Kafka Connect cluster> + name: <name of connector> + namespace: <namespace> +spec: + autoRestart: + # Enables automatic restarting of Kafka connectors in case of errors or failures. + # For more details, see: https://strimzi.io/blog/2023/01/25/auto-restarting-connectors/ + enabled: true + maxRestarts: <no. of times the connector attempts to restart in case of an error or failure> + + # Connector class name + class: com.ibm.eventstreams.connect.connectivitypacksource.ConnectivityPackSourceConnector + + # `tasksMax` should be equal to the number of object-eventType combinations + # In this example it is 4 (Test_task__e - CREATED, Order_Event__e - CREATED, OrderConfirmation__e - CREATED, CaseChangeEvent - UPDATED) + tasksMax: 4 + + config: + # Which data source to connect to eg. salesforce + connectivitypack.source: salesforce + + # URL to access the data source + connectivitypack.source.url: <URL of the data source instance> + + # Credentials to access the data source (This example shows auth-type - 'OAUTH2_PASSWORD') + connectivitypack.source.credentials.authType: <auth-type that the data source supports. eg. 
OAUTH2_PASSWORD> + connectivitypack.source.credentials.username: ${file:/mnt/salesforce-credential:username} + connectivitypack.source.credentials.password: ${file:/mnt/salesforce-credential:password} + connectivitypack.source.credentials.clientIdentity: ${file:/mnt/salesforce-credential:clientIdentity} + connectivitypack.source.credentials.clientSecret: ${file:/mnt/salesforce-credential:clientSecret} + + # Objects and associated events to read from the datasource + # For example: The config lists the source objects the connector will listen to. Each object corresponds to a data source object, + # such as Salesforce-style objects or custom events, that the connector processes. + connectivitypack.source.objects: 'Test_task__e, Order_Event__e, OrderConfirmation__e, CaseChangeEvent' + + # Specifies the events (for example, CREATED, UPDATED) to capture for the 'Test_task__e' object. + # The connector will process only events of the specified type for this object. + connectivitypack.source.Test_task__e.events: CREATED + + connectivitypack.source.Order_Event__e.events: CREATED + connectivitypack.source.OrderConfirmation__e.events: CREATED + connectivitypack.source.CaseChangeEvent.events: UPDATED + + # Optional, sets the format for Kafka topic names created by the connector. + # You can use placeholders like '${object}' and '${eventType}', which the connector will replace automatically. + # Including '${object}' or '${eventType}' in the format is optional. For example, '${object}-topic-name' is a valid format. + # By default, the format is '${object}-${eventType}', but it's shown here for clarity. 
+ connectivitypack.topic.name.format: '${object}-${eventType}' + + # standard kafka connector properties + value.converter.schemas.enable: false + value.converter: org.apache.kafka.connect.json.JsonConverter diff --git a/ibm-connectivity-pack/.helmignore b/ibm-connectivity-pack/.helmignore new file mode 100644 index 0000000..fd7b143 --- /dev/null +++ b/ibm-connectivity-pack/.helmignore @@ -0,0 +1,24 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ +license/ diff --git a/ibm-connectivity-pack/Chart.yaml b/ibm-connectivity-pack/Chart.yaml new file mode 100644 index 0000000..db589a1 --- /dev/null +++ b/ibm-connectivity-pack/Chart.yaml @@ -0,0 +1,22 @@ +apiVersion: v2 +appVersion: 1.0.1 +description: Helm chart for IBM Connectivity Pack. The helm application is a Connectors as a Container offering that offers connectors as independently deployable application and provides a consistent interface for interaction. The application offers both action and event connectors. 
+keywords: + - ibm-connectivity-pack + - connector + - connectors + - connectivity + - action-connectors + - event-connectors + - actions + - events + - connector-runtime + - connectivityx + - connector-framework + - webhook-connectors + - polling-connectors + - event-polling +kubeVersion: '>=1.25.0' +name: ibm-connectivity-pack +type: application +version: 1.0.0 diff --git a/ibm-connectivity-pack/LICENSE b/ibm-connectivity-pack/LICENSE new file mode 100644 index 0000000..d63a0f0 --- /dev/null +++ b/ibm-connectivity-pack/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + © Copyright IBM Corporation. 
2017, 2018 + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/ibm-connectivity-pack/README.md b/ibm-connectivity-pack/README.md new file mode 100644 index 0000000..b93f342 --- /dev/null +++ b/ibm-connectivity-pack/README.md @@ -0,0 +1,164 @@ +# Installing the IBM Connectivity Pack + +The Connectivity Pack acts as an interface between Kafka Connect connectors and external systems you want to connect to. It can be deployed on OpenShift and other Kubernetes platforms by using the Connectivity Pack Helm chart. + +## Prerequisites + +- OpenShift 4.12 or later +- Kubernetes version 1.25 or later +- Helm CLI 3.0 or later + +## Installing + +To install the Connectivity Pack, run the following command: + +```bash +helm install <RELEASE-NAME> <CONNECTIVITY-PACK-HELM-CHART-URL> --set license.licenseId=<LICENSE-ID>,license.accept=true -n <NAMESPACE> +``` + +Where: + +- `<RELEASE-NAME>` is the release name of your choice. For example, `ibm-connectivity-pack` +- `<CONNECTIVITY-PACK-HELM-CHART-URL>` is the URL of the latest version of the Connectivity Pack Helm chart. For example: `https://github.com/ibm-messaging/connectivity-pack-kafka-connectors/releases/download/1.0.0/ibm-connectivity-pack-1.0.0.tgz` +- `license.licenseId=<LICENSE-ID>` is the license identifier (ID) for the program that you purchased. For more information, see [licensing reference](https://ibm.github.io/event-automation/support/licensing/). 
+- `license.accept` determines whether the license is accepted (default is `false` if not specified). +- `<NAMESPACE>` is the namespace where the Connectivity Pack is deployed. + +You can override the default configuration parameters by using the `--set` flag or by using a custom YAML file. For example, to set the `replicaCount` as `3`, you can use `--set replicaCount=3`. + +For a complete list of configuration parameters supported by the helm chart, see [Configuring](#configuring). + +## Uninstalling + +To uninstall the Connectivity Pack by using the Helm chart, run the following command: + +```bash +helm uninstall <RELEASE-NAME> -n <NAMESPACE> +``` + +Where: + +- `<RELEASE-NAME>` is the release name of your Connectivity Pack installation. +- `<NAMESPACE>` is the namespace where the Connectivity Pack is deployed. + +## Configuring + +You can configure your installation by adding configurable parameters through the `--set` flag in your `helm install` command or by using a custom YAML file. + +The following table lists the configurable parameters of the Connectivity Pack Helm chart and their default values: + +| Parameter | Description | Default | +| -------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------- | +| license.accept | Indicates acceptance of license terms | false | +| license.licenseId | license identifier (ID) for the program that you purchased as per [licensing reference](https://ibm.github.io/event-automation/support/licensing/) | +| replicaCount | Number of replicas of the pod | 1 | +| bunyan | Log configuration for the system | See [Logging](#logging) and the sample [values.yaml](values.yaml) file for more information. 
| +| annotations | Override with product specific annotations | See [values.yaml](values.yaml) for more information. | +| environmentVariables | Yaml object of environment variables to be added in action and event services | +| image.registry | Image registry URL | cp.icr.io | +| image.path | Image namespace or the path under image registry before image name and digest | cp/ibm-eventstreams | +| image.imagePullSecretName | Kubernetes image pull secret if it already exists in the namespace, if not add the following image pull details to create new secret | ibm-entitlement-key | +| image.imagePullEmail | Image pull secret email ID | dummyEmail | +| image.imagePullUsername | Image pull username | iamapikey | +| image.imagePullPassword | Image pull password | dummyPassword | +| certificate.MTLSenable | Enable mTLS else fallback to TLS | true | +| certificate.generate | Generate new certificates for mTLS/TLS, this should be used for certificate rotation. However, if `certificate.serverSecretName` and `certificate.clientSecretName` are already specified, this option will be ignored. | true | +| certificate.clientSecretName | Already existing mTLS client certificate Kubernetes secret name, if left empty new certificate will be generated on helm install | +| certificate.clientCertPropertyName | Property name in secret which holds mTLS client certificate | 'tls.crt' | +| certificate.clientCertKeyPropertyName | Property name in secret which holds mTLS client certificate key | 'tls.key' | +| certificate.clientCertPKCSPropertyName | Property name in secret which holds PKCS12 client certificate | 'pkcs.p12' | +| certificate.pkcsPassword | PKCS12 file password | +| certificate.serverSecretName | Already existing mTLS/TLS server certificate Kubernetes secret name. If left empty, a new certificate will be generated during Connectivity Pack installation. 
| +| certificate.serverCertPropertyName | Property name in secret which holds mTLS/TLS server certificate | 'tls.crt' | +| certificate.serverCertKeyPropertyName | Property name in secret which holds mTLS/TLS server certificate key | 'tls.key' | +| certificate.caCertPropertyName | Property name in secret which holds certificate authority certificate | 'ca.crt' | +| route.enable | Enable OpenShift Route for external access update domain and make `certificate.generate` to true so that certificate has the domain entry, _Enable only for OpenShift cluster_ | false | +| route.domain | Domain or subdomain of cluster | 'example.com' | +| basicAuth.enable | Enable basic authentication for service | false | +| basicAuth.username | Basic auth username | csuser | +| preHook.image | Prehook job image name | connectivity-pack-prehook | +| preHook.digest | Prehook job image digest | sha256:a401875a35737d377e7a18753ba26a52999d8c060589256e32263008c5f19747 | +| proxy.image | Proxy service container image name | connectivity-pack-proxy | +| proxy.digest | Proxy service container image digest | sha256:8ee2b1fe96f00fb90bd25df122097f3653ba7b83eb6808a5a3971e21548a5fe4 | +| action.image | Action service container image name | action-connectors | +| action.digest | Action service container image digest | +| action.resources | Action service container resources Check Kubernetes deployment resources for more details | See [values.yaml](values.yaml) | +| event.enable | Enable event container | true | +| event.image | Event service container image name | event-connectors | +| event.digest | Event service container image digest | +| event.resources | Event service container resources Check Kubernetes deployment resources for more details | See [values.yaml](values.yaml) | +| javaservice.enable | Enable java-service container | false | +| javaservice.image | java-service container image name | connector-service-java | +| javaservice.digest | java-service container image digest | +| 
javaservice.resources | java-service container resources Check Kubernetes deployment resources for more details | See [values.yaml](values.yaml) | +| autoScaling.enable | Enable auto-scaling | false | +| autoScaling.minReplicas | Minimum replicas for auto-scaling | 1 | +| autoScaling.maxReplicas | Maximum replicas for auto-scaling | 5 | +| autoScaling.cpuUtilization | Target CPU utilization percentage for auto-scaling | 70 | +| autoScaling.memoryUtilization | Target memory utilization percentage for auto-scaling | 70 | + +### Configuring your mTLS + +The Helm chart supports both mTLS and TLS through `certificate.MTLSenable`: + +**mTLS Enabled:** Certificates are generated and stored in a Kubernetes secret. To regenerate certificates, set `certificate.generate` to `true`. + +**mTLS disabled:** If not enabled, the service defaults to TLS. + +### OpenShift Route + +To enable an OpenShift Route for external access, set `route.enable` to `true`. This exposes your system outside the cluster through an OpenShift route. + +### Basic authentication + +Basic authentication can be enabled for services by setting `basicAuth.enable` to `true`. + +### Auto-Scaling + +The chart supports horizontal pod auto-scaling based on CPU and memory utilization. Add and modify the following snippet in the `values.yaml` file: + +```yaml +autoScaling: + enable: true + minReplicas: 1 + maxReplicas: 5 + cpuUtilization: 70 + memoryUtilization: 70 +``` + +### Logging + +You can configure detailed logging by using the `bunyan` configuration in the `values.yaml` file. This supports various logging levels such as `info`, `debug`, and `trace`, and output formats. 
+ +```js + { + "loglevel": 'trace'|'debug'|'info'|'warn'|'error'|'fatal', // default logging level of all streams (string) + "logsrc": true|false, // include the source filename and line-number (boolean) + "logstdout": { + "loglevel": 'trace'|'debug'|'info'|'warn'|'error'|'fatal' + }, + "logstdouttext": { + "ignoredlogsources": ["shoutyFile.js", "somedir/loudClass.js"] // ignore logs from files with a path matching any of these regexes (array of RegExp strings). This will set 'logsrc' to true" + }, + "logstdoutlogdna": {}, + "logfile": { + "loglevel": 'trace'|'debug'|'info'|'warn'|'error'|'fatal', + "filename": "mydir/myfile.log", // The path and name of a file to write logs to (string) + "rotate": true|false, // if true turns on bunyan 'rotating-file' log file rotation, (boolean - defaults to false) + "rotatecount": 3, // number of files for rotation if active, (number - defaults to 10) + "rotateperiod": "12h", // log rotation period for rotation if active, time duration eg '1d' or '6h', (string defaults to '1d' + }, + "logstdoutdashboard" : { + "logformat": "basic"|"json" // optional, default=basic + }, + logstdoutwo: {}, + "logdna": { + "url": 'https://logs.eu-de.logging.cloud.ibm.com/logs/ingest', // ingestion URL for the logDNA instance (string - found on the logDNA dashboard, under '?' 
(bottom left) > 'REST API' > The value MUST end with /ingest, trim off all the query parameters + "key": "abc123def456ghi789abc123def456gh", // ingestion key for the logDNA instance (string - found on the logDNA dashboard, under 'settings' > 'ORGANIZATION' > ' API Keys' + "flushlimit": 1000000, // maximum log buffer size (in bytes) allowed before which the buffer is automatically flushed (default=5000000) + "flushinterval": 100 // duration (in ms) to wait from the last flush before the next automated log buffer flush, when flushlimit isn't exceeded (default=250)" + } + } +``` + + diff --git a/ibm-connectivity-pack/connector-config.json b/ibm-connectivity-pack/connector-config.json new file mode 100644 index 0000000..0b9ddc6 --- /dev/null +++ b/ibm-connectivity-pack/connector-config.json @@ -0,0 +1,19 @@ +{ + "action-connectors": { + "tag": "1.0.0", + "digest": "sha256:4068f1e0ba627e8237c08c8e5a2ba62e3c0d1ab1fa3ca983b22adc7d427fbc62" + }, + "event-connectors": { + "tag": "1.0.0", + "digest": "sha256:1c575f3e7658c186723fb490c9513bcda666a4f823a2d56b11ae22ac1a6c37d3" + }, + "java-tech-connectors": {}, + "connectivity-pack-proxy": { + "tag": "1.0.0", + "digest": "sha256:8ee2b1fe96f00fb90bd25df122097f3653ba7b83eb6808a5a3971e21548a5fe4" + }, + "connectivity-pack-prehook": { + "tag": "1.0.0", + "digest": "sha256:a401875a35737d377e7a18753ba26a52999d8c060589256e32263008c5f19747" + } +} diff --git a/ibm-connectivity-pack/templates/InstallEAPreHook.yaml b/ibm-connectivity-pack/templates/InstallEAPreHook.yaml new file mode 100644 index 0000000..41d9bcd --- /dev/null +++ b/ibm-connectivity-pack/templates/InstallEAPreHook.yaml @@ -0,0 +1,32 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "ibm-connectivity-pack.eaPreHookJob" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . 
| nindent 4 }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade + "helm.sh/hook-weight": "1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +spec: + activeDeadlineSeconds: 300 + template: + spec: + serviceAccountName: {{ include "ibm-connectivity-pack.preHookJob" . }}-sa + restartPolicy: Never + imagePullSecrets: + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . }} + containers: + - name: mtls-cert-generator + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.preHook.image }}@{{ .Values.preHook.digest }} + command: + - /bin/sh + - "-c" + - | + {{- if not .Values.license.licenseId }} + {{- fail " \nYou have not provided a valid license. To continue the installation, set 'license.licenseId' and provide a valid value from https://ibm.biz/ea-license." }} + {{- end }} + {{- include "ibm-connectivity-pack.validateLicense" (dict "licenseId" .Values.license.licenseId) }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/InstallPreHook.yaml b/ibm-connectivity-pack/templates/InstallPreHook.yaml new file mode 100644 index 0000000..26b7c99 --- /dev/null +++ b/ibm-connectivity-pack/templates/InstallPreHook.yaml @@ -0,0 +1,71 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "ibm-connectivity-pack.preHookJob" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade + "helm.sh/hook-weight": "0" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +spec: + activeDeadlineSeconds: 300 + template: + spec: + serviceAccountName: {{ include "ibm-connectivity-pack.preHookJob" . }}-sa + restartPolicy: Never + imagePullSecrets: + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . 
}} + containers: + - name: mtls-cert-generator + securityContext: + capabilities: + drop: + - ALL + privileged: false + runAsNonRoot: true + readOnlyRootFilesystem: false + allowPrivilegeEscalation: false + seccompProfile: + type: RuntimeDefault + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.preHook.image }}@{{ .Values.preHook.digest }} + command: + - /bin/sh + - "-c" + - | + {{- if not .Values.license.accept }} + {{- fail (printf "\nYou have not accepted the terms of the license. To continue the installation, accept the license by setting 'license.accept' to true.") }} + {{- end }} + {{- if and .Values.image.imagePullSecretName (not (lookup "v1" "Secret" .Release.Namespace .Values.image.imagePullSecretName)) }} + {{- fail (printf "\nFailed to find image pull secret '%s' in namespace '%s'" .Values.image.imagePullSecretName .Release.Namespace) }} + {{- end }} + {{- if and .Values.certificate.serverSecretName (not (lookup "v1" "Secret" .Release.Namespace .Values.certificate.serverSecretName)) }} + {{- fail (printf "\nFailed to find certificate secret '%s' in namespace '%s'" .Values.certificate.serverSecretName .Release.Namespace) }} + {{- end }} + {{- if and .Values.certificate.MTLSenable .Values.certificate.clientSecretName (not (lookup "v1" "Secret" .Release.Namespace .Values.certificate.clientSecretName)) }} + {{- fail (printf "\nFailed to find certificate secret '%s' in namespace '%s'" .Values.certificate.clientSecretName .Release.Namespace) }} + {{- end }} + {{- if and (not .Values.certificate.serverSecretName) (not .Values.certificate.clientSecretName) }} + export IS_CERT_GENERATE={{ .Values.certificate.generate }} + export COMMON_NAME={{ include "ibm-connectivity-pack.service" . }} + export DNS2={{ include "ibm-connectivity-pack.service" . 
}}.{{ .Release.Namespace }}.svc.cluster.local + export ROUTE_ENABLED={{ .Values.route.enable }} + export DNS3={{ .Values.route.domain }} + export RELEASE_NAME={{ .Release.Name }} + export CA_CERT_NAME={{ .Values.certificate.caCertPropertyName }} + export CLIENT_SECRET_NAME={{ include "ibm-connectivity-pack.stunnelClient" .}} + export CLIENT_CERT_NAME={{ .Values.certificate.clientCertPropertyName }} + export CLIENT_KEY_NAME={{ .Values.certificate.clientCertKeyPropertyName }} + export CLIENT_PKCS_P12={{ .Values.certificate.clientCertPKCSPropertyName }} + export SERVER_SECRET_NAME={{ include "ibm-connectivity-pack.stunnelServer" .}} + export SERVER_CERT_NAME={{ .Values.certificate.serverCertPropertyName }} + export SERVER_KEY_NAME={{ .Values.certificate.serverCertKeyPropertyName }} + export PKCS_PASSWORD={{ .Values.certificate.pkcsPassword }} + export MTLS_ENABLED={{ .Values.certificate.MTLSenable }} + sh createCert.sh + {{- range $key, $value := .Values.annotations }} + kubectl annotate secret $CLIENT_SECRET_NAME {{ $key }}="{{ $value }}" -n {{ $.Release.Namespace }} --overwrite + kubectl annotate secret $SERVER_SECRET_NAME {{ $key }}="{{ $value }}" -n {{ $.Release.Namespace }} --overwrite + {{- end }} + {{- end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/NOTES.txt b/ibm-connectivity-pack/templates/NOTES.txt new file mode 100644 index 0000000..b1d5436 --- /dev/null +++ b/ibm-connectivity-pack/templates/NOTES.txt @@ -0,0 +1,10 @@ + +IBM Connectivity Pack has been installed successfully. To access securely, configure Kafka Connect custom resource to use: + +- configuration from the ConfigMap: {{ include "ibm-connectivity-pack.config" . }} +{{- if .Values.certificate.MTLSenable }} +- mTLS certificates from the secret: {{ include "ibm-connectivity-pack.stunnelClient" . }} +{{- end }} +{{- if .Values.basicAuth.enable }} +- Basic authentication credentials from the secret: {{ include "ibm-connectivity-pack.basicAuthCreds" . 
}} +{{- end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/_helpers.tpl b/ibm-connectivity-pack/templates/_helpers.tpl new file mode 100644 index 0000000..4aaa87f --- /dev/null +++ b/ibm-connectivity-pack/templates/_helpers.tpl @@ -0,0 +1,294 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "ibm-connectivity-pack.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "ibm-connectivity-pack.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "ibm-connectivity-pack.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "ibm-connectivity-pack.labels" -}} +helm.sh/chart: {{ include "ibm-connectivity-pack.chart" . }} +{{ include "ibm-connectivity-pack.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/rand-key: {{randAlphaNum 13 | nospace}} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "ibm-connectivity-pack.selectorLabels" -}} +app.kubernetes.io/name: {{ include "ibm-connectivity-pack.name" . 
}} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the deployment +*/}} +{{- define "ibm-connectivity-pack.deploymentName" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-deployment +{{- end }} +{{- end }} + +{{/* +Create the name of the serviceAccount +*/}} +{{- define "ibm-connectivity-pack.serviceAccountName" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-service-account +{{- end }} +{{- end }} + +{{/* +Create the name of the config-map +*/}} +{{- define "ibm-connectivity-pack.configMap" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-configmap +{{- end }} +{{- end }} + +{{/* +Create the name of the token store secret +*/}} +{{- define "ibm-connectivity-pack.tokenStore" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-ts +{{- end }} +{{- end }} + +{{/* +Create the name of the basic auth secret +*/}} +{{- define "ibm-connectivity-pack.basicAuthCreds" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-creds +{{- end }} +{{- end }} + +{{/* +Create the name of the basic auth password +*/}} +{{- define "ibm-connectivity-pack.basicAuthPassword" -}} +{{- if .Release.Name }} +{{- randAlphaNum 13 | nospace -}} +{{- end }} +{{- end }} + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.service" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-service +{{- end }} +{{- end }} + +{{/* +Create the event service route +*/}} +{{- define "ibm-connectivity-pack.eventServiceRoute" -}} +{{- printf "https://%s:3004" (include "ibm-connectivity-pack.service" .) }} +{{- end }} + +{{/* +Create the action service route +*/}} +{{- define "ibm-connectivity-pack.actionServiceRoute" -}} +{{- printf "https://%s:3001" (include "ibm-connectivity-pack.service" .) }} +{{- end }} + +{{/* +Create the webhook service route +*/}} +{{- define "ibm-connectivity-pack.webhookServiceRoute" -}} +{{- printf "https://%s:3009" (include "ibm-connectivity-pack.service" .) 
}} +{{- end }} + +{{/* +Create the mutal auth service route +*/}} +{{- define "ibm-connectivity-pack.mutualAuthServiceRoute" -}} +{{- printf "https://%s" (include "ibm-connectivity-pack.service" .) }} +{{- end }} + +{{/* +Create the java service route +*/}} +{{- define "ibm-connectivity-pack.javaServiceRoute" -}} +{{- printf "http://%s:9080/connector-java-services/_lcp_jdbc_connect" (include "ibm-connectivity-pack.service" .) }} +{{- end }} + +{{/* +Create the role +*/}} +{{- define "ibm-connectivity-pack.role" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-role +{{- end }} +{{- end }} + +{{/* +Create the rolebinding +*/}} +{{- define "ibm-connectivity-pack.rolebinding" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-rolebinding +{{- end }} +{{- end }} + +{{/* +Create the networkpolicy +*/}} +{{- define "ibm-connectivity-pack.networkpolicy" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-networkpolicy +{{- end }} +{{- end }} + +{{/* +Create the name for image pull secret name +*/}} +{{- define "ibm-connectivity-pack.imagePullSecretname" -}} +{{- if eq .Values.image.imagePullSecretName "" }} +{{- default .Release.Name }}-image-pull-cred +{{ else }} +{{- .Values.image.imagePullSecretName }} +{{- end }} +{{- end }} + +{{/* +Create the name proxy config map name +*/}} +{{- define "ibm-connectivity-pack.proxy" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-proxy +{{- end }} +{{- end }} + +{{/* +Create the name for image pull secret +*/}} +{{- define "ibm-connectivity-pack.imagePullSecret" -}} +{{- if eq .Values.image.imagePullSecretName "" }} +{{- printf "{\"%s\":{\"username\": \"%s\", \"password\": \"%s\", \"email\": \"%s\" }}" .Values.image.registry .Values.image.imagePullUsername .Values.image.imagePullPassword .Values.image.imagePullEmail }} +{{- end }} +{{- end }} + +{{/* +Create the name for stunnel server cert secret +*/}} +{{- define "ibm-connectivity-pack.stunnelServer" -}} +{{- if 
.Values.certificate.serverSecretName }} +{{- .Values.certificate.serverSecretName }} +{{- else }} +{{- default .Release.Name }}-server-certificate +{{- end }} +{{- end }} + +{{/* +Create the name for stunnel client cert secret +*/}} +{{- define "ibm-connectivity-pack.stunnelClient" -}} +{{- if .Values.certificate.clientSecretName }} +{{- .Values.certificate.clientSecretName }} +{{- else }} +{{- default .Release.Name }}-client-certificate +{{- end }} +{{- end }} + + +{{/* +Create the name for Horizontal Pod Autoscaler +*/}} +{{- define "ibm-connectivity-pack.hpa" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-hpa +{{- end }} +{{- end }} + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.preHookJob" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-prehook-job +{{- end }} +{{- end }} + + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.preHookJobSa" -}} +{{- if .Release.Name }} +{{- include "ibm-connectivity-pack.preHookJob" . }}-sa +{{- end }} +{{- end }} + + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.preHookJobRole" -}} +{{- if .Release.Name }} +{{- include "ibm-connectivity-pack.preHookJob" . }}-creator +{{- end }} +{{- end }} + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.preHookJobRoleBinding" -}} +{{- if .Release.Name }} +{{- include "ibm-connectivity-pack.preHookJob" . 
}}-creator-binding +{{- end }} +{{- end }} + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.config" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-config +{{- end }} +{{- end }} + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.envConfig" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-env-config +{{- end }} +{{- end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/_helpersEA.tpl b/ibm-connectivity-pack/templates/_helpersEA.tpl new file mode 100644 index 0000000..2be0780 --- /dev/null +++ b/ibm-connectivity-pack/templates/_helpersEA.tpl @@ -0,0 +1,107 @@ +{{/* +Define the name of the Event Automation Helm Chart Prehook Job for validating the license +*/}} +{{- define "ibm-connectivity-pack.eaPreHookJob" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-ea-prehook-job +{{- end }} +{{- end }} + +{{/* +Define the name of the Event Automation Helm Chart Prehook Job for validating the license +*/}} +{{- define "ibm-connectivity-pack.eaPostHookJob" -}} +{{- if .Release.Name }} +{{- default .Release.Name }}-ea-posthook-job +{{- end }} +{{- end }} + + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.eaPostHookJobSa" -}} +{{- if .Release.Name }} +{{- include "ibm-connectivity-pack.eaPostHookJob" . }}-sa +{{- end }} +{{- end }} + + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.eaPostHookJobRole" -}} +{{- if .Release.Name }} +{{- include "ibm-connectivity-pack.eaPostHookJob" . }}-creator +{{- end }} +{{- end }} + +{{/* +Create the name of the service +*/}} +{{- define "ibm-connectivity-pack.eaPostHookJobRoleBinding" -}} +{{- if .Release.Name }} +{{- include "ibm-connectivity-pack.eaPostHookJob" . 
}}-creator-binding +{{- end }} +{{- end }} + + +# Function for fetching the license +{{- define "ibm-connectivity-pack.fetchLicense" -}} + +{{- $licenseListCP4I := list "L-QYVA-B365MB" "L-JVML-UFQVM4" -}} +{{- $licenseListEA := list "L-AUKS-FKVXVL" -}} + +{{- $licenseId := .licenseId | quote -}} + +{{- $foundCP4ILicense := false }} +{{- $foundEALicense := false }} +{{- $licenseType := "" }} + +{{- range $licenseListCP4I }} + {{- if eq (quote .) $licenseId }} + {{- $foundCP4ILicense = true }} + {{- $licenseType = "CP4I" }} + {{- end }} +{{- end }} + +{{- range $licenseListEA }} + {{- if eq (quote .) $licenseId }} + {{- $foundEALicense = true }} + {{- $licenseType = "EA" }} + {{- end }} +{{- end }} + +{{- $licenseType }} + +{{- end }} + + +# Function for validating the license +{{- define "ibm-connectivity-pack.validateLicense" -}} + +{{- $licenseListCP4I := list "L-QYVA-B365MB" "L-JVML-UFQVM4" -}} +{{- $licenseListEA := list "L-AUKS-FKVXVL" -}} + +{{- $licenseId := .licenseId | quote -}} + +{{- $foundCP4ILicense := false }} +{{- $foundEALicense := false }} + +{{- range $licenseListCP4I }} + {{- if eq (quote .) $licenseId }} + {{- $foundCP4ILicense = true }} + {{- end }} +{{- end }} + +{{- range $licenseListEA }} + {{- if eq (quote .) 
$licenseId }} + {{- $foundEALicense = true }} + {{- end }} +{{- end }} + +{{- if not (or $foundCP4ILicense $foundEALicense )}} + {{- fail (printf "\nYou have provided an invalid license: %s.\nTo continue the installation, set 'license.licenseId' and provide a valid value from https://ibm.biz/ea-license.\nValid Event Automation licenses are:\n %s\nValid Cloud Pak for Integration licenses are:\n %s" $licenseId $licenseListEA $licenseListCP4I) }} +{{- end }} + +{{- end }} diff --git a/ibm-connectivity-pack/templates/creds.yaml b/ibm-connectivity-pack/templates/creds.yaml new file mode 100644 index 0000000..15c0402 --- /dev/null +++ b/ibm-connectivity-pack/templates/creds.yaml @@ -0,0 +1,15 @@ +{{- if .Values.basicAuth.enable -}} +kind: Secret +apiVersion: v1 +metadata: + name: {{ include "ibm-connectivity-pack.basicAuthCreds" . }} + namespace: {{ .Release.Namespace }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} +data: + password: {{ include "ibm-connectivity-pack.basicAuthPassword" . | b64enc }} + username: {{ .Values.basicAuth.username | b64enc }} +type: kubernetes.io/basic-auth +{{ end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/csConfig.yaml b/ibm-connectivity-pack/templates/csConfig.yaml new file mode 100644 index 0000000..898290d --- /dev/null +++ b/ibm-connectivity-pack/templates/csConfig.yaml @@ -0,0 +1,11 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: {{ include "ibm-connectivity-pack.config" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +data: + CONNECTIVITYPACK_SERVICE_URL: {{ include "ibm-connectivity-pack.service" . 
}}.{{ .Release.Namespace }}.svc.cluster.local \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/deployment.yaml b/ibm-connectivity-pack/templates/deployment.yaml new file mode 100644 index 0000000..f53e7e9 --- /dev/null +++ b/ibm-connectivity-pack/templates/deployment.yaml @@ -0,0 +1,396 @@ +kind: Deployment +apiVersion: apps/v1 +metadata: + name: {{ include "ibm-connectivity-pack.deploymentName" . }} + namespace: {{ .Release.Namespace }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: + {{- include "ibm-connectivity-pack.selectorLabels" . | nindent 6 }} + template: + metadata: + creationTimestamp: null + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 8 }} + spec: + restartPolicy: Always + serviceAccountName: {{ include "ibm-connectivity-pack.serviceAccountName" .}} + terminationGracePeriodSeconds: 60 + imagePullSecrets: + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . }} + containers: + - name: {{ .Values.action.name }} + resources: + {{- toYaml .Values.action.resources | nindent 12 }} + readinessProbe: + httpGet: + path: /admin/ready + port: 3020 + scheme: HTTP + timeoutSeconds: 30 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + startupProbe: + httpGet: + path: /admin/ready + port: 3020 + scheme: HTTP + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 120 + livenessProbe: + httpGet: + path: /admin/ready + port: 3020 + scheme: HTTP + timeoutSeconds: 30 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 3 + envFrom: + - configMapRef: + optional: true + name: {{ include "ibm-connectivity-pack.envConfig" . 
}} + env: + - name: ENABLE_INMEMORY_ACCOUNT_STORE + value: 'true' + - name: CONNECTOR_SERVICE_PORT + value: "3020" + - name: FEATURE_TOGGLES_OVERRIDE + value: "{\"epic3633-stateless-account\": 1, \"epic3627-ea-connector-service\": 1 }" + - name: LCP_HTTP_PORT + value: "3020" + - name: FIREFLY_DESIGNER_RUNTIME_MODE + value: "ACP" + - name: FIREFLY_ROUTE_EVENTS_CONNECTOR_PROVIDER + value: {{ include "ibm-connectivity-pack.eventServiceRoute" .}} + - name: OPENAPI_FILES_MOUNT_DIRECTORY + value: "./connectors" + - name: APPCONNECT_PREMIUM_CONNECTOR_MOUNTPATH + value: "./mountPath" + - name: CONNECTOR_SERVICE + value: 'true' + - name: DESIGNER_FLOWS_OPERATION_MODE + value: local + - name: DEVELOPMENT_MODE + value: 'false' + - name: FIREFLY_USERID + value: default + - name: APPLICATION_MEM_LIMIT + value: {{ .Values.action.resources.limits.memory }} + - name: bunyan + value: {{ .Values.bunyan }} + - name: CONNECTOR_ACCOUNT + value: /opt/ibm/app/accounts + - name: NAMESPACE + value: {{ .Release.Namespace }} + - name: TOKEN_STORE_SECRET_NAME + value: {{ include "ibm-connectivity-pack.tokenStore" .}} + - name: MQSI_DISABLE_SALESFORCE_CONNECTOR + value: '1' + - name: FIREFLY_ROUTE_LOOPBACK_CONNECTOR_PROVIDER + value: {{ include "ibm-connectivity-pack.actionServiceRoute" .}} + - name: SERVICE_NAME + value: {{ .Values.action.name }} + - name: WORKING_DIRECTORY + value: /opt/ibm/app/workdir + - name: CONNECTOR_SERVICE_AUTH_CREDS_FILE + value: "/opt/ibm/app/creds" + - name: CONNECTOR_SERVICE_EVENTS_ALLOWLIST + value: {{- toYaml .Values.csCommon.eventList | indent 2 }} + - name: CONNECTOR_SERVICE_EVENTS + value: {{ .Values.event.enable | quote }} + - name: SUBSCRIBE_PERSISTENCE + value: k8s + - name: MUTUAL_AUTH + value: {{ include "ibm-connectivity-pack.mutualAuthServiceRoute" .}} + - name: CONNECTOR_JAVA_SERVICE + value: {{ include "ibm-connectivity-pack.javaServiceRoute" .}} + - name: TECH_CONN_CCT_MODE + value: development_mode + - name: POD_NAME + valueFrom: + fieldRef: + 
apiVersion: v1 + fieldPath: metadata.name + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + imagePullPolicy: IfNotPresent + volumeMounts: + - name: tmp + mountPath: /tmp + - name: workdir + readOnly: true + mountPath: /opt/ibm/app/workdir + {{ if .Values.certificate.MTLSenable -}} + - name: stunnel-client + readOnly: true + mountPath: /opt/ibm/app/ssl + {{ end }} + {{ if .Values.basicAuth.enable -}} + - name: cred + readOnly: true + mountPath: "/opt/ibm/app/creds" + {{ end }} + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.action.image }}@{{ .Values.action.digest }} + {{ if .Values.event.enable -}} + - name: {{ .Values.event.name }} + resources: + {{- toYaml .Values.event.resources | nindent 12 }} + readinessProbe: + exec: + command: + - /readiness/readyECP.sh + timeoutSeconds: 4 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 1 + startupProbe: + exec: + command: + - /readiness/readyECP.sh + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 120 + livenessProbe: + exec: + command: + - /readiness/readyECP.sh + timeoutSeconds: 5 + periodSeconds: 10 + successThreshold: 1 + failureThreshold: 1 + envFrom: + - configMapRef: + optional: true + name: {{ include "ibm-connectivity-pack.envConfig" . 
}} + env: + - name: ENABLE_INMEMORY_ACCOUNT_STORE + value: 'true' + - name: FIREFLY_ROUTE_LOOPBACK_CONNECTOR_PROVIDER + value: {{ include "ibm-connectivity-pack.actionServiceRoute" .}} + - name: FEATURE_TOGGLES_OVERRIDE + value: "{\"epic3633-stateless-account\": 1, \"epic3627-ea-connector-service\": 1 }" + - name: CONNECTOR_SERVICE_EVENTS + value: {{ .Values.event.enable | quote }} + - name: DESIGNER_FLOWS_OPERATION_MODE + value: local + - name: FIREFLY_USERID + value: default + - name: NAMESPACE + value: {{ .Release.Namespace }} + - name: SERVICE_NAME + value: {{ include "ibm-connectivity-pack.service" .}} + - name: ECP_SERVICE_ROUTE + value: {{ include "ibm-connectivity-pack.eventServiceRoute" .}} + - name: WCP_IPC_PATH + value: "3009" + - name: WEBHOOK_PROVIDER_BASEURL + value: {{ include "ibm-connectivity-pack.webhookServiceRoute" .}} + - name: LEADER_ELECTION_PORT + value: "3003" + - name: CS_WEB_SOCKET_SERVER_PORT + value: "3022" + - name: APPLICATION_MEM_LIMIT + value: {{ .Values.event.resources.limits.memory }} + - name: WORKING_DIRECTORY + value: /opt/ibm/app/workdir + - name: CONNECTOR_SERVICE_EVENTS_ALLOWLIST + value: {{- toYaml .Values.csCommon.eventList | indent 2 }} + - name: CONNECTOR_SERVICE_ENABLE_WS + value: 'true' + - name: CONNECTOR_ACCOUNT + value: /opt/ibm/app/accounts + - name: POD_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.name + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: SERVER_NAME + value: {{ .Release.Name }} + - name: bunyan + value: {{ .Values.bunyan }} + - name: TOKEN_STORE_SECRET_NAME + value: {{ include "ibm-connectivity-pack.tokenStore" .}} + - name: MQSI_DISABLE_SALESFORCE_CONNECTOR + value: '1' + - name: DEVELOPMENT_MODE + value: 'false' + - name: SUBSCRIBE_PERSISTENCE + value: k8s + - name: SINGLE_REPLICA + value: 'true' + - name: CONNECTOR_SERVICE_AUTH_CREDS_FILE + value: "/opt/ibm/app/creds" + - name: CONNECTOR_SERVICE_ENABLE_WS_PRODUCER + value: 'true' + 
securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + ports: + - name: ecpleadelect + containerPort: 3003 + protocol: UDP + - name: webhook + containerPort: 3009 + protocol: TCP + imagePullPolicy: IfNotPresent + volumeMounts: + - name: tmp + mountPath: /tmp + - name: workdir + readOnly: true + mountPath: /opt/ibm/app/workdir + {{ if .Values.certificate.MTLSenable -}} + - name: stunnel-client + readOnly: true + mountPath: /opt/ibm/app/ssl + {{ end }} + {{ if .Values.basicAuth.enable -}} + - name: cred + readOnly: true + mountPath: "/opt/ibm/app/creds" + {{ end }} + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.event.image }}@{{ .Values.event.digest }} + {{ end }} + {{ if .Values.javaservice.enable -}} + - name: {{ .Values.javaservice.name }} + resources: + {{- toYaml .Values.javaservice.resources | nindent 12 }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + env: + - name: NAMESPACE + value: {{ .Release.Namespace }} + ports: + - name: javaservice + containerPort: 9080 + protocol: TCP + imagePullPolicy: IfNotPresent + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.javaservice.image }}@{{ .Values.javaservice.digest }} + {{ end }} + - name: {{ .Values.proxy.name }} + resources: # Update below value for vertical scaling of container + limits: + cpu: '1' + ephemeral-storage: 10Gi + memory: 512Mi + requests: + cpu: 100m + ephemeral-storage: 150Mi + memory: 256Mi + readinessProbe: + exec: + command: + - /readiness/ready.sh + timeoutSeconds: 3 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 1 + livenessProbe: + exec: + command: + - /readiness/ready.sh + timeoutSeconds: 3 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 1 + startupProbe: + exec: + command: + - /readiness/ready.sh + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 120 + terminationMessagePath: /dev/termination-log + lifecycle: + preStop: + exec: + command: + - 
kill `cat /tmp/haproxy.pid` + command: + - /bin/stunnel + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + ports: + - name: https-port + containerPort: 3001 + protocol: TCP + - name: https-port-ecp + containerPort: 3004 + protocol: TCP + - name: https-webhook + containerPort: 3008 + protocol: TCP + - name: https-websocket + containerPort: 3022 + protocol: TCP + imagePullPolicy: IfNotPresent + volumeMounts: + - name: tmp + mountPath: /tmp + - name: stunnel-server + mountPath: /etc/stunnel/secrets + - name: proxy + mountPath: /etc/stunnel + terminationMessagePolicy: File + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.proxy.image }}@{{ .Values.proxy.digest }} + args: + - /etc/stunnel/stunnel.conf + serviceAccount: {{ include "ibm-connectivity-pack.serviceAccountName" .}} + volumes: + - name: tmp + emptyDir: + sizeLimit: 10Gi + - name: workdir + emptyDir: {} + - name: cred + secret: + secretName: {{ include "ibm-connectivity-pack.basicAuthCreds" . }} + optional: true + - name: proxy + configMap: + name: {{ include "ibm-connectivity-pack.proxy" . }} + - name: stunnel-server + secret: + secretName: {{ include "ibm-connectivity-pack.stunnelServer" . }} + items: + - key: {{ .Values.certificate.serverCertKeyPropertyName }} + path: server.key.pem + - key: {{ .Values.certificate.serverCertPropertyName }} + path: server.cert.pem + - key: {{ .Values.certificate.caCertPropertyName }} + path: server.ca.pem + {{ if .Values.certificate.MTLSenable -}} + - name: stunnel-client + secret: + secretName: {{ include "ibm-connectivity-pack.stunnelClient" . 
}} + items: + - key: {{ .Values.certificate.caCertPropertyName }} + path: stunnel.ca.pem + - key: {{ .Values.certificate.clientCertPropertyName }} + path: stunnel.cert.pem + - key: {{ .Values.certificate.clientCertKeyPropertyName }} + path: stunnel.key.pem + {{- end }} + strategy: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 25% + maxSurge: 25% diff --git a/ibm-connectivity-pack/templates/earole.yaml b/ibm-connectivity-pack/templates/earole.yaml new file mode 100644 index 0000000..c4debc5 --- /dev/null +++ b/ibm-connectivity-pack/templates/earole.yaml @@ -0,0 +1,25 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + name: {{ include "ibm-connectivity-pack.eaPostHookJobRole" . }} + annotations: + "helm.sh/hook": post-install,post-upgrade + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +rules: + - apiGroups: ["apps"] # For Deployment objects + resources: ["deployments"] + verbs: ["get", "list", "watch", "update", "patch"] # Permissions for editing Deployments + - apiGroups: [""] # For Pods or other core resources if needed + resources: ["pods"] + verbs: ["get", "list", "patch", "update"] + - apiGroups: [ "" ] + resources: [ "secrets" ] + verbs: [ "create", "get", "list", "update", "patch" ] + - apiGroups: [ "" ] + resources: [ "configmaps" ] + verbs: [ "create", "get", "list", "update", "patch" ] \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/earolebinding.yaml b/ibm-connectivity-pack/templates/earolebinding.yaml new file mode 100644 index 0000000..8da3d05 --- /dev/null +++ b/ibm-connectivity-pack/templates/earolebinding.yaml @@ -0,0 +1,20 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ include "ibm-connectivity-pack.eaPostHookJobRoleBinding" . 
}} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": post-install,post-upgrade + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +subjects: + - kind: ServiceAccount + name: {{ include "ibm-connectivity-pack.eaPostHookJobSa" . }} + namespace: {{ .Release.Namespace }} +roleRef: + kind: Role + name: {{ include "ibm-connectivity-pack.eaPostHookJobRole" . }} + apiGroup: rbac.authorization.k8s.io diff --git a/ibm-connectivity-pack/templates/easerviceaccount.yaml b/ibm-connectivity-pack/templates/easerviceaccount.yaml new file mode 100644 index 0000000..f0cc211 --- /dev/null +++ b/ibm-connectivity-pack/templates/easerviceaccount.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: + "helm.sh/hook": post-install,post-upgrade + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} + name: {{ include "ibm-connectivity-pack.eaPostHookJobSa" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/envConfigMap.yaml b/ibm-connectivity-pack/templates/envConfigMap.yaml new file mode 100644 index 0000000..4e4b10a --- /dev/null +++ b/ibm-connectivity-pack/templates/envConfigMap.yaml @@ -0,0 +1,13 @@ +{{- if not (eq (toJson .Values.environmentVariables) "{}") }} +kind: ConfigMap +apiVersion: v1 +metadata: + name: {{ include "ibm-connectivity-pack.envConfig" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . 
| nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +data: + {{- toYaml .Values.environmentVariables | nindent 2 }} +{{ end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/hpa.yaml b/ibm-connectivity-pack/templates/hpa.yaml new file mode 100644 index 0000000..81c905e --- /dev/null +++ b/ibm-connectivity-pack/templates/hpa.yaml @@ -0,0 +1,44 @@ +{{ if .Values.autoScaling.enable -}} +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: {{ include "ibm-connectivity-pack.hpa" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +spec: + scaleTargetRef: + kind: Deployment + apiVersion: apps/v1 + name: {{ include "ibm-connectivity-pack.deploymentName" . }} + minReplicas: {{ .Values.autoScaling.minReplicas }} + maxReplicas: {{ .Values.autoScaling.maxReplicas }} + behavior: + scaleUp: + stabilizationWindowSeconds: 30 + policies: + - type: Pods + value: 1 + periodSeconds: 30 + scaleDown: + stabilizationWindowSeconds: 30 + policies: + - type: Pods + value: 1 + periodSeconds: 30 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: {{ .Values.autoScaling.cpuUtilization }} + - type: Resource + resource: + name: memory + target: + type: Utilization + averageUtilization: {{ .Values.autoScaling.memoryUtilization }} +{{ end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/imagePullSecret.yaml b/ibm-connectivity-pack/templates/imagePullSecret.yaml new file mode 100644 index 0000000..1c76389 --- /dev/null +++ b/ibm-connectivity-pack/templates/imagePullSecret.yaml @@ -0,0 +1,15 @@ +{{- if eq .Values.image.imagePullSecretName "" }} +kind: Secret +apiVersion: v1 +metadata: + name: {{ include "ibm-connectivity-pack.imagePullSecretname" . 
}} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +data: + .dockercfg: >- + {{- include "ibm-connectivity-pack.imagePullSecret" . | b64enc | nindent 4 }} +type: kubernetes.io/dockercfg +{{ end }} diff --git a/ibm-connectivity-pack/templates/installEAPostHook.yaml b/ibm-connectivity-pack/templates/installEAPostHook.yaml new file mode 100644 index 0000000..e92a52c --- /dev/null +++ b/ibm-connectivity-pack/templates/installEAPostHook.yaml @@ -0,0 +1,77 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "ibm-connectivity-pack.eaPostHookJob" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": post-install,post-upgrade + "helm.sh/hook-weight": "0" + "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded + {{- toYaml .Values.annotations | nindent 4 }} +spec: + activeDeadlineSeconds: 300 + template: + spec: + serviceAccountName: {{ include "ibm-connectivity-pack.eaPostHookJob" . }}-sa + restartPolicy: Never + imagePullSecrets: + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . }} + containers: + - name: mtls-cert-generator + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.preHook.image }}@{{ .Values.preHook.digest }} + command: + - /bin/sh + - "-c" + - | + kubectl patch configMap {{ include "ibm-connectivity-pack.config" .}} \ + -n {{ .Release.Namespace }} \ + --patch '{"metadata":{"labels":{"eventstreams.ibm.com/name":"{{ include "ibm-connectivity-pack.config" . }}"}}}' + kubectl patch secret {{ include "ibm-connectivity-pack.stunnelClient" .}} \ + -n {{ .Release.Namespace }} \ + --patch '{"metadata":{"labels":{"eventstreams.ibm.com/name":"{{ include "ibm-connectivity-pack.stunnelClient" . 
}}"}}}' + kubectl patch configMap {{ include "ibm-connectivity-pack.proxy" .}} \ + -n {{ .Release.Namespace }} \ + --patch '{"metadata":{"labels":{"eventstreams.ibm.com/name":"{{ include "ibm-connectivity-pack.proxy" . }}"}}}' + + kubectl annotate deployment {{ include "ibm-connectivity-pack.deploymentName" . }} productChargedContainers='' + kubectl annotate deployment {{ include "ibm-connectivity-pack.deploymentName" . }} productMetric='FREE' + + {{- $productID := "" }} + {{- $productName := "" }} + {{- if .Values.license.licenseId }} + {{- $licenseType := include "ibm-connectivity-pack.fetchLicense" (dict "licenseId" .Values.license.licenseId) }} + {{- if eq $licenseType "CP4I" }} + {{- $productID = "2cba508800504d0abfa48a0e2c4ecbe2" }} + {{- $productName = "IBM Event Streams" }} + {{- else if eq $licenseType "EA" }} + {{- $productID = "682b6db3fed247a098d85da5ab905b46" }} + {{- $productName = "IBM Event Automation" }} + {{- else }} + {{- fail " \nYou have not provided a valid license. To continue the installation, set 'license.licenseId' and provide a valid value from https://ibm.biz/ea-license." }} + {{- end }} + {{- end }} + kubectl annotate deployment {{ include "ibm-connectivity-pack.deploymentName" . }} productName="{{ $productName }}" + kubectl annotate deployment {{ include "ibm-connectivity-pack.deploymentName" . }} productID="{{ $productID }}" + + kubectl patch deployment {{ include "ibm-connectivity-pack.deploymentName" . }} \ + -n {{ .Release.Namespace }} \ + --patch '{ + "spec": { + "template": { + "metadata": { + "annotations": { + "productID": "{{ $productID }}", + "productName": "{{ $productName }}", + "productChargedContainers":"", + "productMetric":"FREE" + }, + "labels": { + "eventstreams.ibm.com/name": "{{ include "ibm-connectivity-pack.deploymentName" . 
}}" + } + } + } + } + }' \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/proxy.yaml b/ibm-connectivity-pack/templates/proxy.yaml new file mode 100644 index 0000000..f53011e --- /dev/null +++ b/ibm-connectivity-pack/templates/proxy.yaml @@ -0,0 +1,84 @@ +kind: ConfigMap +apiVersion: v1 +metadata: + name: {{ include "ibm-connectivity-pack.proxy" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +data: + {{ if .Values.certificate.MTLSenable -}} + stunnel.conf: |- + ; ************************************************************************** + ; * Global options * + ; ************************************************************************** + pid = /tmp/haproxy.pid + foreground = yes + ; ************************************************************************** + ; * Service defaults * + ; ************************************************************************** + cert =/etc/stunnel/secrets/server.cert.pem + key =/etc/stunnel/secrets/server.key.pem + CAfile =/etc/stunnel/secrets/server.ca.pem + ; Allow only TLS, thus avoiding SSL + sslVersion = TLSv1.2 + socket = l:TCP_NODELAY=1 + socket = r:TCP_NODELAY=1 + verify = 2 + TIMEOUTclose = 0 + ; ************************************************************************** + ; * Services * + ; ************************************************************************** + [proxy] + accept = 3001 + connect = /tmp/lcp.socket + [ecpproxy] + accept = 3004 + connect = /tmp/ecp.socket + [fbcproxy] + accept = 3006 + connect=localhost:3005 + [webhookproxy] + verify = 1 + accept = 3008 + connect = localhost:3009 + [wsproxy] + accept = 3042 + connect = localhost:3022 + {{ else }} + stunnel.conf: |- + ; ************************************************************************** + ; * Global options * + ; ************************************************************************** + pid = 
/tmp/haproxy.pid + foreground = yes + ; ************************************************************************** + ; * Service defaults * + ; ************************************************************************** + cert =/etc/stunnel/secrets/server.cert.pem + key =/etc/stunnel/secrets/server.key.pem + ; Allow only TLS, thus avoiding SSL + sslVersion = TLSv1.2 + socket = l:TCP_NODELAY=1 + socket = r:TCP_NODELAY=1 + TIMEOUTclose = 0 + ; ************************************************************************** + ; * Services * + ; ************************************************************************** + [proxy] + accept = 3001 + connect = /tmp/lcp.socket + [ecpproxy] + accept = 3004 + connect = /tmp/ecp.socket + [fbcproxy] + accept = 3006 + connect=localhost:3005 + [webhookproxy] + accept = 3008 + connect = localhost:3009 + [wsproxy] + accept = 3042 + connect = localhost:3022 + {{ end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/role.yaml b/ibm-connectivity-pack/templates/role.yaml new file mode 100644 index 0000000..0e23c8b --- /dev/null +++ b/ibm-connectivity-pack/templates/role.yaml @@ -0,0 +1,81 @@ +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: {{ include "ibm-connectivity-pack.role" .}} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . 
| nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +rules: + - verbs: + - get + - list + - create + - patch + - delete + apiGroups: + - '' + resources: + - services + - endpoints + - verbs: + - get + apiGroups: + - apps + resources: + - statefulsets + - verbs: + - get + - list + apiGroups: + - batch + resources: + - jobs + - verbs: + - get + - list + - create + - patch + - delete + apiGroups: + - apps + resources: + - deployments + - verbs: + - get + - update + - delete + - create + apiGroups: + - '' + resources: + - secrets + - verbs: + - get + - update + - delete + - create + - list + apiGroups: + - appconnect.ibm.com + resources: + - configurations + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + name: {{ include "ibm-connectivity-pack.preHookJobRole" . }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade, post-delete + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +rules: + - apiGroups: [""] + resources: ["secrets"] + verbs: ["create", "get", "list", "update", "patch", "delete"] \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/rolebinding.yaml b/ibm-connectivity-pack/templates/rolebinding.yaml new file mode 100644 index 0000000..c1d659b --- /dev/null +++ b/ibm-connectivity-pack/templates/rolebinding.yaml @@ -0,0 +1,36 @@ +kind: RoleBinding +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: {{ include "ibm-connectivity-pack.rolebinding" .}} + labels: + {{- include "ibm-connectivity-pack.labels" . 
| nindent 4 }} +subjects: + - kind: ServiceAccount + name: {{ include "ibm-connectivity-pack.serviceAccountName" .}} + namespace: {{ .Release.Namespace }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: {{ include "ibm-connectivity-pack.role" .}} + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: {{ include "ibm-connectivity-pack.preHookJobRoleBinding" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": pre-install, pre-upgrade, post-delete + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +subjects: + - kind: ServiceAccount + name: {{ include "ibm-connectivity-pack.preHookJobSa" . }} + namespace: {{ .Release.Namespace }} +roleRef: + kind: Role + name: {{ include "ibm-connectivity-pack.preHookJobRole" . }} + apiGroup: rbac.authorization.k8s.io diff --git a/ibm-connectivity-pack/templates/route.yaml b/ibm-connectivity-pack/templates/route.yaml new file mode 100644 index 0000000..47d91af --- /dev/null +++ b/ibm-connectivity-pack/templates/route.yaml @@ -0,0 +1,22 @@ +{{- if .Values.route.enable -}} +apiVersion: route.openshift.io/v1 +kind: Route +metadata: + name: {{ include "ibm-connectivity-pack.service" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +spec: + to: + kind: Service + name: {{ include "ibm-connectivity-pack.service" . 
}} + weight: 100 + port: + targetPort: proxy + tls: + termination: passthrough + insecureEdgeTerminationPolicy: None + wildcardPolicy: None +{{ end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/service.yaml b/ibm-connectivity-pack/templates/service.yaml new file mode 100644 index 0000000..1cd36b1 --- /dev/null +++ b/ibm-connectivity-pack/templates/service.yaml @@ -0,0 +1,38 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "ibm-connectivity-pack.service" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +spec: + type: ClusterIP + ports: + - name: proxy + protocol: TCP + port: 3001 + targetPort: 3001 + - name: eventproxy + protocol: TCP + port: 3004 + targetPort: 3004 + - name: ecpleaderelect + protocol: UDP + port: 3003 + targetPort: 3003 + - name: webhook + protocol: TCP + port: 3009 + targetPort: 3009 + - name: websocket + protocol: TCP + port: 3022 + targetPort: 3042 + - name: javaservice + protocol: TCP + port: 9080 + targetPort: 9080 + selector: + {{- include "ibm-connectivity-pack.selectorLabels" . | nindent 4 }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/serviceaccount.yaml b/ibm-connectivity-pack/templates/serviceaccount.yaml new file mode 100644 index 0000000..a9c02c1 --- /dev/null +++ b/ibm-connectivity-pack/templates/serviceaccount.yaml @@ -0,0 +1,26 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "ibm-connectivity-pack.serviceAccountName" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +secrets: + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . }} +imagePullSecrets: + - name: ibm-entitlement-key + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . 
}} + +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: + "helm.sh/hook": pre-install, pre-upgrade, post-delete + "helm.sh/hook-weight": "-1" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} + name: {{ include "ibm-connectivity-pack.preHookJobSa" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/tokenStore.yaml b/ibm-connectivity-pack/templates/tokenStore.yaml new file mode 100644 index 0000000..93ddd01 --- /dev/null +++ b/ibm-connectivity-pack/templates/tokenStore.yaml @@ -0,0 +1,13 @@ +{{- if not (lookup "v1" "Secret" .Release.Namespace (include "ibm-connectivity-pack.tokenStore" .)) -}} +kind: Secret +apiVersion: v1 +metadata: + name: {{ include "ibm-connectivity-pack.tokenStore" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + {{- toYaml .Values.annotations | nindent 4 }} +data: {} +type: Opaque +{{ end }} \ No newline at end of file diff --git a/ibm-connectivity-pack/templates/uninstallPostHook.yaml b/ibm-connectivity-pack/templates/uninstallPostHook.yaml new file mode 100644 index 0000000..ca585b8 --- /dev/null +++ b/ibm-connectivity-pack/templates/uninstallPostHook.yaml @@ -0,0 +1,38 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "ibm-connectivity-pack.preHookJob" . }} + labels: + {{- include "ibm-connectivity-pack.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": post-delete + "helm.sh/hook-weight": "0" + "helm.sh/hook-delete-policy": hook-succeeded,hook-failed,before-hook-creation + {{- toYaml .Values.annotations | nindent 4 }} +spec: + activeDeadlineSeconds: 300 + template: + spec: + serviceAccountName: {{ include "ibm-connectivity-pack.preHookJob" . 
}}-sa + restartPolicy: Never + imagePullSecrets: + - name: {{ include "ibm-connectivity-pack.imagePullSecretname" . }} + containers: + - name: mtls-cert-generator + securityContext: + capabilities: + drop: + - ALL + privileged: false + runAsNonRoot: true + readOnlyRootFilesystem: false + allowPrivilegeEscalation: false + seccompProfile: + type: RuntimeDefault + image: {{ .Values.image.registry }}/{{ .Values.image.path }}/{{ .Values.preHook.image }}@{{ .Values.preHook.digest }} + command: + - /bin/sh + - "-c" + - | + kubectl delete secret {{ include "ibm-connectivity-pack.stunnelClient" .}} --namespace {{ .Release.Namespace }} --ignore-not-found=true + kubectl delete secret {{ include "ibm-connectivity-pack.stunnelServer" .}} --namespace {{ .Release.Namespace }} --ignore-not-found=true \ No newline at end of file diff --git a/ibm-connectivity-pack/values.yaml b/ibm-connectivity-pack/values.yaml new file mode 100644 index 0000000..42651e3 --- /dev/null +++ b/ibm-connectivity-pack/values.yaml @@ -0,0 +1,99 @@ +############################################################################### +# +# © Copyright IBM Corp. 2024 +# +############################################################################### +# Default values for IBM Connectivity pack. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. 
+ +license: + accept: false +replicaCount: 1 # Replica of the POD +bunyan: "'{\"loglevel\" : \"info\", \"logsrc\" : true, \"logstdouttext\" : {}}'" # Override Log configuration +environmentVariables: {} # Yaml object ENV to be added on action and event pods +annotations: + productName: IBM Connectivity pack +image: + registry: cp.icr.io # Image registry base URL + path: cp/ibm-eventstreams + imagePullSecretName: ibm-entitlement-key # Image pull secret name if already exist please specify here else give below values for creating new Image pull secret + imagePullEmail: dummyEmail # Image pull secret email ID + imagePullUsername: iamapikey # Image pull username + imagePullPassword: '' # Image pull password +certificate: + MTLSenable: true # Enable MTLS if true else fallback to TLS + generate: true # generate new certificates use this for certificate rotation + clientSecretName: '' # If MTLS/TLS secret for client already exist please specify here + clientCertPropertyName: 'tls.crt' + clientCertKeyPropertyName: 'tls.key' + clientCertPKCSPropertyName: 'pkcs.p12' + serverSecretName: '' # If MTLS/TLS secret for server already exist please specify here + serverCertPropertyName: 'tls.crt' + serverCertKeyPropertyName: 'tls.key' + caCertPropertyName: 'ca.crt' + pkcsPassword: '' # pkcs.p12 password +route: + enable: false # generate OpenShift route for external access + domain: 'example.com' # subdomain of OpenShift cluster where connector service is being deployed +basicAuth: + enable: false # Enable basic auth for service + username: 'csuser' +preHook: + image: connectivity-pack-prehook + digest: sha256:a401875a35737d377e7a18753ba26a52999d8c060589256e32263008c5f19747 + tag: 1.0.0 +proxy: + name: connectivity-pack-proxy + image: connectivity-pack-proxy + digest: sha256:8ee2b1fe96f00fb90bd25df122097f3653ba7b83eb6808a5a3971e21548a5fe4 + tag: 1.0.0 +action: + name: action-connectors + image: action-connectors + digest: 
sha256:4068f1e0ba627e8237c08c8e5a2ba62e3c0d1ab1fa3ca983b22adc7d427fbc62 + resources: # Update below value for vertical scaling of container + limits: + cpu: '1' + ephemeral-storage: 10Gi + memory: 512Mi + requests: + cpu: 250m + ephemeral-storage: 5Gi + memory: 256Mi + tag: 1.0.0 +event: + enable: true # Set to true if Event container is needed + name: event-connectors + image: event-connectors # Update image + digest: sha256:1c575f3e7658c186723fb490c9513bcda666a4f823a2d56b11ae22ac1a6c37d3 + resources: # Update below value for vertical scaling of container + limits: + cpu: '1' + ephemeral-storage: 10Gi + memory: 750Mi + requests: + cpu: 250m + ephemeral-storage: 5Gi + memory: 400Mi + tag: 1.0.0 +javaservice: + enable: false +autoScaling: + enable: false # Update this to true to enable autoScaling + minReplicas: 1 # What is the minimum replica needed + maxReplicas: 5 # What is the maximum replica needed + cpuUtilization: 70 # % of CPU utilization after which POD to be replicated + memoryUtilization: 70 # % of Memory utilization after which POD to be replicated +securityContext: + capabilities: + drop: + - ALL + privileged: false + runAsNonRoot: true + readOnlyRootFilesystem: true + allowPrivilegeEscalation: false + seccompProfile: + type: RuntimeDefault +csCommon: + eventList: "['salesforceevent']" diff --git a/systems/salesforce.md b/systems/salesforce.md new file mode 100644 index 0000000..d523588 --- /dev/null +++ b/systems/salesforce.md @@ -0,0 +1,146 @@ +# Salesforce + +The Salesforce connector enables streaming of Salesforce platform events and Change Data Capture (CDC) events by using the Faye client or Bayeux protocol. This connector also supports discovery of custom objects and properties. +## Pre-requisites + +- Ensure streaming API is enabled for your Salesforce edition and organization. +- Ensure you have the required permissions set up in Salesforce to use Change Data Capture objects. 
+- Ensure you have the required permissions set up in Salesforce to access the specified objects and events. +- Set the Session Security Level at login value to `None` instead of `High Assurance`. +- To connect to Salesforce sandboxes or subdomains and use Salesforce as a source system to trigger events, enable the Salesforce Organization object in your Salesforce environment. +- If using Change Data Capture (CDC) events, ensure that CDC is enabled for the specified objects in Salesforce. + +## Connecting to Salesforce + +The `connectivitypack.source` and `connectivitypack.source.url` configurations in the `KafkaConnector` custom resource provide the connector with the required information to connect to the data source. + +| **Name** | **Value or Description** | +| ------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | +| `connectivitypack.source` | `salesforce` | +| `connectivitypack.source.url` | Specifies the URL of the source system. For example, for Salesforce, the base URL of your instance is `https://<yourinstance>.salesforce.com`. | + +## Supported authentication mechanisms + +You can configure the following authentication mechanisms for Salesforce in the `KafkaConnector` custom resource depending on the authentication flow in Salesforce. + +### 1. Basic OAuth + +- **Use Case:** Recommended for most applications. +- **Required Credentials:** + - **Client Identity:** Obtain this by creating a *Connected App* in Salesforce and locating the *Consumer Key* under the application's settings. + - **Client Secret:** Available in the *Connected App* configuration alongside the *Consumer Key*. + - **Access Token and Refresh Token:** Generated by performing an OAuth flow with the configured Connected App. 
+ +For more information, see the [Salesforce OAuth 2.0 Documentation](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/intro_understanding_web_server_oauth_flow.htm). + +| **Name** | **Description** | +| -------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| **connectivitypack.source.credentials.authType** | `BASIC_OAUTH` - Specifies that the connector will use Basic OAuth for authentication. | +| **connectivitypack.source.credentials.clientSecret** | The client secret of the Salesforce connected app used for Basic OAuth authentication. | +| **connectivitypack.source.credentials.clientIdentity** | The client ID (or consumer key) of the Salesforce connected app used for Basic OAuth authentication. | +| **connectivitypack.source.credentials.accessTokenBasicOauth** | The access token used for Basic OAuth authentication with Salesforce. | +| **connectivitypack.source.credentials.refreshTokenBasicOauth** | The refresh token used to renew the OAuth access token for Basic OAuth authentication. | + +### 2. OAuth2 Password (Deprecated) + +- **Use Case:** Legacy applications where Basic OAuth is not applicable. +- **Required Credentials:** + - **Username and Password:** Use the Salesforce account’s credentials. + - **Client Identity and Client Secret:** Same as Basic OAuth, obtained from the *Connected App* settings. +- **Important Note:** Salesforce has deprecated the OAuth2 Password grant type. If you're using this method, plan to migrate to Basic OAuth to ensure future compatibility. + +| **Name** | **Description** | +| ------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------ | +| **connectivitypack.source.credentials.authType** | `OAUTH2_PASSWORD` - Specifies that the connector will use OAuth 2.0 Password authentication. 
| +| **connectivitypack.source.credentials.username** | The Salesforce username required for OAuth2 Password authentication. | +| **connectivitypack.source.credentials.password** | The Salesforce password associated with the username for OAuth2 Password authentication. | +| **connectivitypack.source.credentials.clientSecret** | The client secret of the Salesforce Connected App required for OAuth2 Password authentication. | +| **connectivitypack.source.credentials.clientIdentity** | The client ID (or consumer key) of the Salesforce Connected App required for OAuth2 Password authentication. | + +## Supported objects and events + +You can specify any of the following objects and associated events in the `connectivitypack.source.objects` and the `connectivitypack.source.<object>.events` sections of the `KafkaConnector` custom resource: + +### Platform Events + +[Salesforce platform events](https://www.ibm.com/links?url=https%3A%2F%2Fdeveloper.salesforce.com%2Fdocs%2Fatlas.en-us.platform_events.meta%2Fplatform_events%2Fplatform_events_intro.htm) deliver custom event notifications when something meaningful happens to objects that are defined in your Salesforce organization. Platform events are dynamic in nature and specific to the endpoint account connected, and as a result are not shown in the static list. + +| **Objects** | **Events** | +|:-------------------------------------:|:-------------------------:| +| Platform Event objects | CREATED | + +#### Replay ID +Salesforce provides queues for recording platform events and each event notification has a unique replay ID. Salesforce retains platform events for 72 hours, and a user can store a replay ID value to use when subscribing again to retrieve events during the retention window, as described in the [Salesforce documentation](https://developer.salesforce.com/docs/atlas.en-us.api_streaming.meta/api_streaming/using_streaming_api_durability.htm). 
+ +The Salesforce connector uses the replay ID to track Salesforce platform events it has received. If the connector is restarted for any reason, it resumes streaming from where it stopped by using the replay ID. If the replay ID is no longer valid (more than 72 hours old), the connector will not be able to resume. Instead, it will start a new subscription to receive events from the current time. + +### Change Data Capture Events + +Salesforce CDC events provide notifications of state changes to objects that you are interested in. + +**Note:** CDC must be enabled by customers, and it is only available for objects in the dynamic list. + +All custom objects and a subset of standard objects are supported for use with Change Data Capture in Salesforce. For the full list, see [Change Event Object Support](https://www.ibm.com/links?url=https%3A%2F%2Fdeveloper.salesforce.com%2Fdocs%2Fatlas.en-us.change_data_capture.meta%2Fchange_data_capture%2Fcdc_object_support.htm). + +| **Objects** | **Events** | +|:-------------------------------------:|:-------------------------:| +| Change Data Capture objects | CREATED, UPDATED, DELETED | + +## Example configuration + +The following is an example of a connector configuration for Salesforce: + +```yaml +apiVersion: eventstreams.ibm.com/v1beta2 +kind: KafkaConnector +metadata: + labels: + # The eventstreams.ibm.com/cluster label identifies the Kafka Connect instance + # in which to create this connector. That KafkaConnect instance + # must have the eventstreams.ibm.com/use-connector-resources annotation + # set to true. 
+ eventstreams.ibm.com/cluster: cp-connect-cluster + name: <name> + namespace: <namespace> +spec: + # Connector class name + class: com.ibm.eventstreams.connect.connectivitypacksource.ConnectivityPackSourceConnector + + config: + # Which data source to connect to, for example, salesforce + connectivitypack.source: salesforce + + # URL to access the data source, for example, `https://<your-instance-name>.salesforce.com` + connectivitypack.source.url: <URL-of-the-data-source-instance> + + # Credentials to access the data source using OAUTH2_PASSWORD authentication. + connectivitypack.source.credentials.authType: OAUTH2_PASSWORD + connectivitypack.source.credentials.username: <username> + connectivitypack.source.credentials.password: <password> + connectivitypack.source.credentials.clientSecret: <client-secret> + connectivitypack.source.credentials.clientIdentity: <client-identity> + + # Objects and event types to read from the data source + connectivitypack.source.objects: '<object1>,<object2>' + connectivitypack.source.<object1>.events: 'CREATED' + connectivitypack.source.<object2>.events: 'CREATED,UPDATED' + + # Optional, sets the format for Kafka topic names created by the connector. + # You can use placeholders such as '${object}' and '${eventType}', which the connector will replace automatically. + # Including '${object}' or '${eventType}' in the format is optional. For example, '${object}-topic-name' is a valid format. + # By default, the format is '${object}-${eventType}', but it's shown here for clarity. + connectivitypack.topic.name.format: '${object}-${eventType}' + + # `tasksMax` must be equal to the number of object-eventType combinations + # In this example it is 3 (object1 - CREATED, object2 - CREATED, object2 - UPDATED) + tasksMax: 3 + + # Specifies the converter class used to deserialize the message value. + # Change this to a different converter (for example, AvroConverter) as applicable. 
+ value.converter: org.apache.kafka.connect.json.JsonConverter + + # Controls whether the schema is included in the message. + # Set this to false to disable schema support, or to true to enable schema inclusion (for example, for Avro). + value.converter.schemas.enable: false +``` \ No newline at end of file