From d4dd95549a2360ecb95628cbb1d5d27fcea22365 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 13:35:40 -0700 Subject: [PATCH 01/29] Attempt at deletion cronjob without copy --- containers/db_cleanup/Dockerfile | 19 + containers/db_cleanup/cleanup.js | 80 ++++ .../openshift/templates/db-cleanup-bc.yaml | 86 ++++ .../openshift/templates/db-cleanup-dc.yaml | 133 ++++++ containers/db_cleanup/package-lock.json | 419 ++++++++++++++++++ containers/db_cleanup/package.json | 18 + 6 files changed, 755 insertions(+) create mode 100644 containers/db_cleanup/Dockerfile create mode 100644 containers/db_cleanup/cleanup.js create mode 100644 containers/db_cleanup/openshift/templates/db-cleanup-bc.yaml create mode 100644 containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml create mode 100644 containers/db_cleanup/package-lock.json create mode 100644 containers/db_cleanup/package.json diff --git a/containers/db_cleanup/Dockerfile b/containers/db_cleanup/Dockerfile new file mode 100644 index 0000000..60dba03 --- /dev/null +++ b/containers/db_cleanup/Dockerfile @@ -0,0 +1,19 @@ +# Install dependencies only when needed +FROM registry.access.redhat.com/ubi8/nodejs-16 AS deps +USER 0 +WORKDIR /app + +# Ensure that the user can access all application files +RUN chmod -R g+rwX /app + +# Install dependencies based on the preferred package manager +COPY package.json package-lock.json* ./ +RUN npm ci + +COPY . . + +USER 1001 + +# EXPOSE 3700 + +# ENV PORT 3700 diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js new file mode 100644 index 0000000..f1b29da --- /dev/null +++ b/containers/db_cleanup/cleanup.js @@ -0,0 +1,80 @@ +require('dotenv').config(); +const dayjs = require('dayjs'); +const utc = require('dayjs/plugin/utc'); +const timezone = require('dayjs/plugin/timezone'); +dayjs.extend(utc); +dayjs.extend(timezone); + +// Set the default timezone for date objects to the system timezone +dayjs.tz.setDefault(dayjs.tz.guess()); +// NOTE: Since we are using the postgres COPY command, this requires superadmin privileges +// DB configuration +const pg = require('knex')({ + client: 'pg', + connection: { + host: process.env.DB_HOST, + port: process.env.DB_PORT, + user: process.env.DB_USERNAME, + database: process.env.DB_NAME, + password: process.env.DB_PASSWORD, + }, + pool: {min: 0, max: parseInt(process.env.DB_MAX_POOL)} +}); + +const retentionIntervalMonths = /* Math.abs(parseInt(process.env.RENTENTION_MONTHS)) */ 7; +const now = dayjs(); +const retainUntil = now.subtract(retentionIntervalMonths, 'month'); +const fileTimeString = `${now.toDate()}-${now.hour().toPrecision(2)}:${now.minute().toPrecision(2)}:${now.second().toPrecision(2)}`; +const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ'); + +// COPY logs that are about to be deleted to csv +// TODO: Automate storing them elsewhere? 
+// try { +// pg.raw(`\\COPY public.vhers_audit_log TO '/deleted/vhers-audit-log.csv' WITH (FORMAT CSV, HEADER);`).then(); +// } catch (err) { +// console.log(err); +// exit(1); // cannot continue without saving backup +// } + +// pool.connect(function (err, client, done) { +// var stream = client.query(to(`COPY public.vhers_audit_log TO STDOUT`)) +// // var fileStream = fs.createReadStream('/deleted/vhers-audit-log.csv') +// // fileStream.on('error', done) +// stream.on('error', done) +// stream.on('finish', done) +// // fileStream.pipe(stream) +// }); + +// const fs = require('node:fs'); +// const { Pool } = require('pg'); +// const { to } = require('pg-copy-streams'); + +// var pool = new Pool({ +// host: process.env.DB_HOST, +// port: process.env.DB_PORT, +// user: process.env.DB_USERNAME, +// database: process.env.DB_NAME, +// password: process.env.DB_PASSWORD, +// }); + +// const exec = require('child_process').exec; + +// Delete the logs +pg('vhers_audit_log').where('created_at', '<', retainUntilString).delete().then( + () => { + pg('pin_audit_log').where('log_created_at', '<', retainUntilString).delete().then( + () => { + console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`); + process.exit(0); + }, + (err) => { + console.log(err); + process.exit(1); + } + ); + }, + (err) => { + console.log(err); + process.exit(1); + } +); \ No newline at end of file diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-bc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-bc.yaml new file mode 100644 index 0000000..b85d856 --- /dev/null +++ b/containers/db_cleanup/openshift/templates/db-cleanup-bc.yaml @@ -0,0 +1,86 @@ +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: db-cleanup-build +parameters: + - name: NAME + displayName: Name + description: The name assigned to all of the objects defined in this template. + value: db-cleanup + required: true + - name: GIT_SOURCE_URL + displayName: GIT Source Repo URL + description: A GIT URL to your source code. + value: "https://github.com/bcgov/bc-emli-vhers-scan.git" + required: true + - name: GIT_REF + displayName: Git Reference + description: The git reference or branch. + value: dev + required: true + - name: SOURCE_CONTEXT_DIR + displayName: Source Context Directory + description: The source context directory. 
+ value: containers/db_cleanup + - name: NAME_SPACE + displayName: Namespace for source image + value: c82b4c-tools + required: true + - name: CPU_REQUEST + value: "100m" + - name: CPU_LIMIT + value: "300m" + - name: MEMORY_REQUEST + value: "500Mi" + - name: MEMORY_LIMIT + value: "800Mi" +objects: + - kind: ImageStream + apiVersion: image.openshift.io/v1 + metadata: + name: "${NAME}" + + - kind: BuildConfig + apiVersion: build.openshift.io/v1 + metadata: + name: "${NAME}-build" + labels: + app: "${NAME}" + spec: + runPolicy: Serial + completionDeadlineSeconds: 1800 + source: + type: Git + git: + ref: "${GIT_REF}" + uri: "${GIT_SOURCE_URL}" + contextDir: "${SOURCE_CONTEXT_DIR}" + secrets: + - secret: + name: platform-services-controlled-etc-pki-entitlement + destinationDir: etc-pki-entitlement + configMaps: + - configMap: + name: platform-services-controlled-rhsm-conf + destinationDir: rhsm-conf + - configMap: + name: platform-services-controlled-rhsm-ca + destinationDir: rhsm-ca + strategy: + type: Docker + dockerStrategy: + from: + kind: ImageStreamTag + name: "ubi:latest" + namespace: "${NAME_SPACE}" + output: + to: + kind: ImageStreamTag + name: "${NAME}:latest" + resources: + requests: + cpu: ${CPU_REQUEST} + memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml new file mode 100644 index 0000000..6d189c1 --- /dev/null +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -0,0 +1,133 @@ +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: db-cleanup-deploy +parameters: + - name: NAME + description: Module name + value: db-cleanup + - name: ZONE + description: Deployment zone, e.g. pr-### or prod + value: tools + required: true + - name: IMAGE_TAG + description: Image tag to use + value: dev + - name: NAME_SPACE + value: c82b4c-tools + required: true + - name: DOMAIN + value: apps.silver.devops.gov.bc.ca + - name: CPU_REQUEST + value: "100m" + - name: CPU_LIMIT + value: "300m" + - name: MEMORY_REQUEST + value: "150Mi" + - name: MEMORY_LIMIT + value: "350Mi" + - name: REGISTRY + description: Container registry to import from (internal is image-registry.openshift-image-registry.svc:5000) + value: ghcr.io + - name: ORG_NAME + description: Organization name, e.g. 
bcgov + value: bcgov + - name: PROMOTE + description: Image (namespace/name:tag) to promote/import + value: bcgov/bc-emli-vhers-scan/db-cleanup:latest +objects: + - apiVersion: v1 + kind: ImageStream + metadata: + labels: + app: "${NAME}-${ZONE}" + name: "${NAME}-${ZONE}" + spec: + lookupPolicy: + local: false + tags: + - name: "${IMAGE_TAG}" + from: + kind: DockerImage + name: "${REGISTRY}/${PROMOTE}" + referencePolicy: + type: Local + - kind: PersistentVolumeClaim + apiVersion: v1 + metadata: + name: db-deletion-dump + spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: 500M + + - apiVersion: batch/v1 + kind: CronJob + metadata: + name: db-cleanup-cronjob + spec: + schedule: "30 2 * * SUN" + startingDeadlineSeconds: 3600 + concurrencyPolicy: Forbid + successfulJobsHistoryLimit: 3 + failedJobsHistoryLimit: 3 + jobTemplate: + spec: + template: + spec: + volumes: + - name: output + persistentVolumeClaim: + claimName: db-deletion-dump + containers: + - name: db-cleanup-container + image: "${NAME}-${ZONE}:${IMAGE_TAG}" + imagePullPolicy: Always + env: # Use environment variables for Secret values + - name: DB_NAME + valueFrom: + secretKeyRef: + name: patroni-ha-postgres-instance + key: app-db-name + - name: DB_HOST + valueFrom: + secretKeyRef: + name: patroni-ha-postgres-instance + key: app-db-hostname + - name: DB_PORT + valueFrom: + secretKeyRef: + name: patroni-ha-postgres-instance + key: app-db-port + - name: DB_USER + valueFrom: + secretKeyRef: + name: patroni-ha-postgres-instance + key: superuser-username + - name: DB_PASSWORD + valueFrom: + secretKeyRef: + name: patroni-ha-postgres-instance + key: superuser-password + - name: RENTENTION_MONTHS + valueFrom: + secretKeyRef: + name: db-cleanup + key: RENTENTION_MONTHS + command: # Command-line arguments + - node + - cleanup.js + resources: + limits: + memory: "${MEMORY_LIMIT}" + cpu: "${CPU_LIMIT}" + requests: + memory: "${MEMORY_REQUEST}" + cpu: "${CPU_REQUEST}" + volumes: + - name: output + persistentVolumeClaim: + claimName: db-deletion-dump + restartPolicy: OnFailure diff --git a/containers/db_cleanup/package-lock.json b/containers/db_cleanup/package-lock.json new file mode 100644 index 0000000..fa68fea --- /dev/null +++ b/containers/db_cleanup/package-lock.json @@ -0,0 +1,419 @@ +{ + "name": "db_cleanup", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "db_cleanup", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "dayjs": "^1.11.10", + "dotenv": "^16.4.1", + "knex": "^3.1.0", + "pg": "^8.11.3", + "pg-copy-streams": "^6.0.6" + } + }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/colorette": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", + "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==" + }, + "node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "engines": { + "node": ">=14" + } + }, + "node_modules/dayjs": { + "version": "1.11.10", + "resolved": 
"https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz", + "integrity": "sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==" + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dotenv": { + "version": "16.4.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.1.tgz", + "integrity": "sha512-CjA3y+Dr3FyFDOAMnxZEGtnW9KBR2M0JvvUtXNW+dYJL5ROWxP9DUHCwgFqpMk0OXCc0ljhaNTr2w/kutYIcHQ==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/motdotla/dotenv?sponsor=1" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/getopts": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz", + "integrity": "sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA==" + }, + "node_modules/hasown": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", + "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/knex": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/knex/-/knex-3.1.0.tgz", + "integrity": "sha512-GLoII6hR0c4ti243gMs5/1Rb3B+AjwMOfjYm97pu0FOQa7JH56hgBxYf5WK2525ceSbBY1cjeZ9yk99GPMB6Kw==", + 
"dependencies": { + "colorette": "2.0.19", + "commander": "^10.0.0", + "debug": "4.3.4", + "escalade": "^3.1.1", + "esm": "^3.2.25", + "get-package-type": "^0.1.0", + "getopts": "2.3.0", + "interpret": "^2.2.0", + "lodash": "^4.17.21", + "pg-connection-string": "2.6.2", + "rechoir": "^0.8.0", + "resolve-from": "^5.0.0", + "tarn": "^3.0.2", + "tildify": "2.0.0" + }, + "bin": { + "knex": "bin/cli.js" + }, + "engines": { + "node": ">=16" + }, + "peerDependenciesMeta": { + "better-sqlite3": { + "optional": true + }, + "mysql": { + "optional": true + }, + "mysql2": { + "optional": true + }, + "pg": { + "optional": true + }, + "pg-native": { + "optional": true + }, + "sqlite3": { + "optional": true + }, + "tedious": { + "optional": true + } + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/obuf": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" + }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/pg": { + "version": "8.11.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.11.3.tgz", + "integrity": "sha512-+9iuvG8QfaaUrrph+kpF24cXkH1YOOUeArRNYIxq1viYHZagBxrTno7cecY1Fa44tJeZvaoG+Djpkc3JwehN5g==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.6.2", + "pg-pool": "^3.6.1", + "pg-protocol": "^1.6.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.1.1" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz", + "integrity": "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.2.tgz", + "integrity": "sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA==" + }, + "node_modules/pg-copy-streams": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/pg-copy-streams/-/pg-copy-streams-6.0.6.tgz", + "integrity": "sha512-Z+Dd2C2NIDTsjyFKmc6a9QLlpM8tjpERx+43RSx0WmL7j3uNChERi3xSvZUL0hWJ1oRUn4S3fhyt3apdSrTyKQ==", + "dependencies": { + "obuf": "^1.1.2" + } + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.1.tgz", + "integrity": "sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", + "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rechoir": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz", + "integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==", + "dependencies": { + "resolve": "^1.20.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tarn": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz", + "integrity": "sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/tildify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz", + "integrity": "sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + } + } +} diff --git a/containers/db_cleanup/package.json b/containers/db_cleanup/package.json new file mode 100644 index 0000000..ba80ca5 --- /dev/null +++ b/containers/db_cleanup/package.json @@ -0,0 +1,18 @@ +{ + "name": "db_cleanup", + "version": "1.0.0", + "description": "", + "main": "cleanup.js", + "scripts": { + "cleanup": "node cleanup.js" + }, + "author": "", + "license": "ISC", + "dependencies": { + "dayjs": "^1.11.10", + "dotenv": "^16.4.1", + "knex": "^3.1.0", + "pg": "^8.11.3", + "pg-copy-streams": "^6.0.6" + } +} From 6cde014c70cd144ce6944d8d38d73d8670d1eff3 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 14:06:54 -0700 Subject: [PATCH 02/29] uncommenting env variable --- containers/db_cleanup/cleanup.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index f1b29da..0077796 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -21,7 +21,7 @@ const pg = require('knex')({ pool: {min: 0, max: parseInt(process.env.DB_MAX_POOL)} }); -const retentionIntervalMonths = /* Math.abs(parseInt(process.env.RENTENTION_MONTHS)) */ 7; +const retentionIntervalMonths = Math.abs(parseInt(process.env.RENTENTION_MONTHS)); const now = dayjs(); const retainUntil = now.subtract(retentionIntervalMonths, 'month'); const fileTimeString = `${now.toDate()}-${now.hour().toPrecision(2)}:${now.minute().toPrecision(2)}:${now.second().toPrecision(2)}`; From f120391074ac7f6649263b89ef1e09f967171001 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 15:05:20 -0700 Subject: [PATCH 03/29] test deploy on openshift --- .github/workflows/.merge.yml | 17 ++++++- containers/db_cleanup/cleanup.js 
| 50 ++++++++++++------- .../openshift/templates/db-cleanup-dc.yaml | 20 ++++---- containers/db_cleanup/package-lock.json | 35 +++++++++++++ containers/db_cleanup/package.json | 1 + 5 files changed, 93 insertions(+), 30 deletions(-) diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml index 706f250..74435f5 100644 --- a/.github/workflows/.merge.yml +++ b/.github/workflows/.merge.yml @@ -4,6 +4,12 @@ on: pull_request: branches: [dev] types: [closed] + # temp change to force a deployment + push: + branches: + - "*" + commit: + workflow_dispatch: concurrency: @@ -59,12 +65,16 @@ jobs: packages: write strategy: matrix: - package: [vhers-virus-scan] + package: [vhers-virus-scan, db-cleanup] include: - package: vhers-virus-scan build_context: ./ triggers: ('vhers-virus-scan/') build_file: Dockerfile + - package: db-cleanup + build_context: ./containers/db_cleanup + triggers: ('vhers-virus-scan/containers/db_cleanup') + build_file: Dockerfile timeout-minutes: 10 steps: - uses: bcgov-nr/action-builder-ghcr@v2.0.0 @@ -90,6 +100,11 @@ jobs: - name: vhers-virus-scan file: ./openshift.deploy.yml overwrite: true + name: [db-cleanup] + include: + - name: db-cleanup + file: ./containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml + overwrite: true steps: - uses: bcgov-nr/action-deployer-openshift@v2.0.0 diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index 0077796..95bdcc5 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -28,28 +28,21 @@ const fileTimeString = `${now.toDate()}-${now.hour().toPrecision(2)}:${now.minut const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ'); // COPY logs that are about to be deleted to csv +// \\g /deleted/vhers-audit-log.csv // TODO: Automate storing them elsewhere? 
-// try { -// pg.raw(`\\COPY public.vhers_audit_log TO '/deleted/vhers-audit-log.csv' WITH (FORMAT CSV, HEADER);`).then(); -// } catch (err) { -// console.log(err); -// exit(1); // cannot continue without saving backup -// } + // pg.raw(`COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)`).then( + // (ret)=>{console.log(ret); process.exit(0);}, + // (err) => {console.log(err); process.exit(1);} + // ); -// pool.connect(function (err, client, done) { -// var stream = client.query(to(`COPY public.vhers_audit_log TO STDOUT`)) -// // var fileStream = fs.createReadStream('/deleted/vhers-audit-log.csv') -// // fileStream.on('error', done) -// stream.on('error', done) -// stream.on('finish', done) -// // fileStream.pipe(stream) -// }); - -// const fs = require('node:fs'); +// const fs = require('fs'); +// const csv = require('csv'); +// const path = require('path'); +// const EOL = require('os').EOL; // const { Pool } = require('pg'); // const { to } = require('pg-copy-streams'); -// var pool = new Pool({ +// const pool = new Pool({ // host: process.env.DB_HOST, // port: process.env.DB_PORT, // user: process.env.DB_USERNAME, @@ -57,7 +50,28 @@ const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ'); // password: process.env.DB_PASSWORD, // }); -// const exec = require('child_process').exec; +// const outFile = path.join( __dirname, 'vhers_audit_log.csv'); +// const writeStream = fs.createWriteStream(outFile); + +// const parse = csv.parse(); + +// const transform = csv.transform((row, cb) => { +// row.push('NEW_COL'); +// result = row.join(',') + EOL; +// cb(null, result); +// }); + +// pool.connect(function (err, client, done) { +// const stream = client.query(to(`COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)`)) +// // var fileStream = fs.createReadStream('/deleted/vhers-audit-log.csv') +// // // fileStream.on('error', done) +// // stream.on('error', done) +// // stream.on('finish', done) +// // // fileStream.pipe(stream) +// stream.pipe(parse).pipe(transform).pipe(writeStream); +// stream.on('end', done) +// stream.on('error', done) +// }); // Delete the logs pg('vhers_audit_log').where('created_at', '<', retainUntilString).delete().then( diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 6d189c1..87c15cd 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -68,8 +68,10 @@ objects: metadata: name: db-cleanup-cronjob spec: - schedule: "30 2 * * SUN" - startingDeadlineSeconds: 3600 + # schedule: "30 2 * * SUN" + # Different schedule for test purposes + schedule: "*/10 * * * *" + startingDeadlineSeconds: 200 concurrencyPolicy: Forbid successfulJobsHistoryLimit: 3 failedJobsHistoryLimit: 3 @@ -90,27 +92,27 @@ objects: valueFrom: secretKeyRef: name: patroni-ha-postgres-instance - key: app-db-name + key: app-db-name-dev - name: DB_HOST valueFrom: secretKeyRef: name: patroni-ha-postgres-instance - key: app-db-hostname + key: db-hostname-dev - name: DB_PORT valueFrom: secretKeyRef: name: patroni-ha-postgres-instance - key: app-db-port + key: db-port-dev - name: DB_USER valueFrom: secretKeyRef: name: patroni-ha-postgres-instance - key: superuser-username + key: superuser-username-dev - name: DB_PASSWORD valueFrom: secretKeyRef: name: patroni-ha-postgres-instance - key: superuser-password + key: superuser-password-dev - name: RENTENTION_MONTHS valueFrom: secretKeyRef: @@ 
-126,8 +128,4 @@ objects: requests: memory: "${MEMORY_REQUEST}" cpu: "${CPU_REQUEST}" - volumes: - - name: output - persistentVolumeClaim: - claimName: db-deletion-dump restartPolicy: OnFailure diff --git a/containers/db_cleanup/package-lock.json b/containers/db_cleanup/package-lock.json index fa68fea..e1bb28c 100644 --- a/containers/db_cleanup/package-lock.json +++ b/containers/db_cleanup/package-lock.json @@ -9,6 +9,7 @@ "version": "1.0.0", "license": "ISC", "dependencies": { + "csv": "^6.3.6", "dayjs": "^1.11.10", "dotenv": "^16.4.1", "knex": "^3.1.0", @@ -37,6 +38,35 @@ "node": ">=14" } }, + "node_modules/csv": { + "version": "6.3.6", + "resolved": "https://registry.npmjs.org/csv/-/csv-6.3.6.tgz", + "integrity": "sha512-jsEsX2HhGp7xiwrJu5srQavKsh+HUJcCi78Ar3m4jlmFKRoTkkMy7ZZPP+LnQChmaztW+uj44oyfMb59daAs/Q==", + "dependencies": { + "csv-generate": "^4.3.1", + "csv-parse": "^5.5.3", + "csv-stringify": "^6.4.5", + "stream-transform": "^3.3.0" + }, + "engines": { + "node": ">= 0.1.90" + } + }, + "node_modules/csv-generate": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/csv-generate/-/csv-generate-4.3.1.tgz", + "integrity": "sha512-7YeeJq+44/I/O5N2sr2qBMcHZXhpfe38eh7DOFxyMtYO+Pir7kIfgFkW5MPksqKqqR6+/wX7UGoZm1Ot11151w==" + }, + "node_modules/csv-parse": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.5.3.tgz", + "integrity": "sha512-v0KW6C0qlZzoGjk6u5tLmVfyZxNgPGXZsWTXshpAgKVGmGXzaVWGdlCFxNx5iuzcXT/oJN1HHM9DZKwtAtYa+A==" + }, + "node_modules/csv-stringify": { + "version": "6.4.5", + "resolved": "https://registry.npmjs.org/csv-stringify/-/csv-stringify-6.4.5.tgz", + "integrity": "sha512-SPu1Vnh8U5EnzpNOi1NDBL5jU5Rx7DVHr15DNg9LXDTAbQlAVAmEbVt16wZvEW9Fu9Qt4Ji8kmeCJ2B1+4rFTQ==" + }, "node_modules/dayjs": { "version": "1.11.10", "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz", @@ -380,6 +410,11 @@ "node": ">= 10.x" } }, + "node_modules/stream-transform": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/stream-transform/-/stream-transform-3.3.0.tgz", + "integrity": "sha512-pG1NeDdmErNYKtvTpFayrEueAmL0xVU5wd22V7InGnatl4Ocq3HY7dcXIKj629kXvYQvglCC7CeDIGAlx1RNGA==" + }, "node_modules/supports-preserve-symlinks-flag": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", diff --git a/containers/db_cleanup/package.json b/containers/db_cleanup/package.json index ba80ca5..ffa7350 100644 --- a/containers/db_cleanup/package.json +++ b/containers/db_cleanup/package.json @@ -9,6 +9,7 @@ "author": "", "license": "ISC", "dependencies": { + "csv": "^6.3.6", "dayjs": "^1.11.10", "dotenv": "^16.4.1", "knex": "^3.1.0", From 7d8ab7e4edb43a2a2931861ee31273a49b32e709 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 15:06:55 -0700 Subject: [PATCH 04/29] fix yml --- .github/workflows/.merge.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml index 74435f5..237ced1 100644 --- a/.github/workflows/.merge.yml +++ b/.github/workflows/.merge.yml @@ -8,8 +8,6 @@ on: push: branches: - "*" - commit: - workflow_dispatch: concurrency: From 71e1be1d3d617a6d7eae3e677ae5d7a6ef392b28 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 15:07:42 -0700 Subject: [PATCH 05/29] deployment? 
--- containers/db_cleanup/cleanup.js | 1 + 1 file changed, 1 insertion(+) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index 95bdcc5..3eac5cf 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -27,6 +27,7 @@ const retainUntil = now.subtract(retentionIntervalMonths, 'month'); const fileTimeString = `${now.toDate()}-${now.hour().toPrecision(2)}:${now.minute().toPrecision(2)}:${now.second().toPrecision(2)}`; const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ'); +// change to force deploy // COPY logs that are about to be deleted to csv // \\g /deleted/vhers-audit-log.csv // TODO: Automate storing them elsewhere? From 5bccb58cb1cf025152b3cb04ab2055d88a9c1911 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 15:10:22 -0700 Subject: [PATCH 06/29] fix yaml again --- .github/workflows/.merge.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml index 237ced1..34ddf7f 100644 --- a/.github/workflows/.merge.yml +++ b/.github/workflows/.merge.yml @@ -93,13 +93,11 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - name: [vhers-virus-scan] + name: [vhers-virus-scan, db-cleanup] include: - name: vhers-virus-scan file: ./openshift.deploy.yml overwrite: true - name: [db-cleanup] - include: - name: db-cleanup file: ./containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml overwrite: true From 1c38220421a19f67b59119ae628c108ea681df03 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 15:37:59 -0700 Subject: [PATCH 07/29] yml change? --- containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 87c15cd..56bc588 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -12,7 +12,7 @@ parameters: required: true - name: IMAGE_TAG description: Image tag to use - value: dev + value: latest - name: NAME_SPACE value: c82b4c-tools required: true From be3ecac24cd279415bf5cdd52e0659fa7d3b6c50 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 16:02:16 -0700 Subject: [PATCH 08/29] fix? 
--- containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 56bc588..01e04d4 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -85,7 +85,7 @@ objects: claimName: db-deletion-dump containers: - name: db-cleanup-container - image: "${NAME}-${ZONE}:${IMAGE_TAG}" + image: "image-registry.openshift-image-registry.svc:5000/c82b4c-tools/db-cleanup-tools" imagePullPolicy: Always env: # Use environment variables for Secret values - name: DB_NAME From b7b2a8e43f56514f01208deea3ab7ad5f06e496a Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 16:12:09 -0700 Subject: [PATCH 09/29] fix env variable name --- containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 01e04d4..7d9fc57 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -92,7 +92,7 @@ objects: valueFrom: secretKeyRef: name: patroni-ha-postgres-instance - key: app-db-name-dev + key: db-name-dev - name: DB_HOST valueFrom: secretKeyRef: From e7c34059b8036ada48e38f43521db04f0bbcd9a7 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 16:21:12 -0700 Subject: [PATCH 10/29] remove unnessecary pool --- containers/db_cleanup/cleanup.js | 1 - 1 file changed, 1 deletion(-) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index 3eac5cf..cdb0baa 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -18,7 +18,6 @@ const pg = require('knex')({ database: process.env.DB_NAME, password: process.env.DB_PASSWORD, }, - pool: {min: 0, max: parseInt(process.env.DB_MAX_POOL)} }); const retentionIntervalMonths = Math.abs(parseInt(process.env.RENTENTION_MONTHS)); From 92c8e9aadf32f862b7e365018e083631002f93d9 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 16:31:49 -0700 Subject: [PATCH 11/29] env variable change --- containers/db_cleanup/cleanup.js | 2 +- containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index cdb0baa..404e376 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -14,7 +14,7 @@ const pg = require('knex')({ connection: { host: process.env.DB_HOST, port: process.env.DB_PORT, - user: process.env.DB_USERNAME, + user: process.env.DB_USER, database: process.env.DB_NAME, password: process.env.DB_PASSWORD, }, diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 7d9fc57..282ae7c 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -70,7 +70,7 @@ objects: spec: # schedule: "30 2 * * SUN" # Different schedule for test purposes - schedule: "*/10 * * * *" + schedule: "*/5 * * * *" startingDeadlineSeconds: 200 concurrencyPolicy: Forbid successfulJobsHistoryLimit: 3 From 
1b60a5c2a6497a343e54f6955de962b147d0b284 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Thu, 1 Feb 2024 16:38:28 -0700 Subject: [PATCH 12/29] set normal schedule for cronjob --- containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 282ae7c..c4fd121 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -68,9 +68,9 @@ objects: metadata: name: db-cleanup-cronjob spec: - # schedule: "30 2 * * SUN" + schedule: "30 2 * * SUN" # Different schedule for test purposes - schedule: "*/5 * * * *" + # schedule: "*/5 * * * *" startingDeadlineSeconds: 200 concurrencyPolicy: Forbid successfulJobsHistoryLimit: 3 From c8a7471a9cab66d1a5e53d160b87afffb10dab07 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Fri, 2 Feb 2024 10:29:19 -0700 Subject: [PATCH 13/29] Add functionality to copy out old logs and delte them form the db --- containers/db_cleanup/cleanup.js | 162 +++++++++++++++++++------------ 1 file changed, 102 insertions(+), 60 deletions(-) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index 404e376..24b216a 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -1,4 +1,10 @@ require('dotenv').config(); +const fs = require('fs'); +const csv = require('csv'); +const path = require('path'); +const EOL = require('os').EOL; +const { Client } = require('pg'); +const { to } = require('pg-copy-streams'); const dayjs = require('dayjs'); const utc = require('dayjs/plugin/utc'); const timezone = require('dayjs/plugin/timezone'); @@ -20,75 +26,111 @@ const pg = require('knex')({ }, }); +const pinClient = new Client({ + host: process.env.DB_HOST, + port: process.env.DB_PORT, + user: process.env.DB_USERNAME, + database: process.env.DB_NAME, + password: process.env.DB_PASSWORD, +}); + +const vhersClient = new Client({ + host: process.env.DB_HOST, + port: process.env.DB_PORT, + user: process.env.DB_USERNAME, + database: process.env.DB_NAME, + password: process.env.DB_PASSWORD, +}); + +// Get timestamps for now and deletion interval const retentionIntervalMonths = Math.abs(parseInt(process.env.RENTENTION_MONTHS)); const now = dayjs(); const retainUntil = now.subtract(retentionIntervalMonths, 'month'); -const fileTimeString = `${now.toDate()}-${now.hour().toPrecision(2)}:${now.minute().toPrecision(2)}:${now.second().toPrecision(2)}`; +const fileTimeString = now.format('YYYY-MM-DD-HH-mm-ss'); const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ'); -// change to force deploy -// COPY logs that are about to be deleted to csv -// \\g /deleted/vhers-audit-log.csv -// TODO: Automate storing them elsewhere? 
- // pg.raw(`COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)`).then( - // (ret)=>{console.log(ret); process.exit(0);}, - // (err) => {console.log(err); process.exit(1);} - // ); +// Create directory for entries +const dir = `./deleted/${fileTimeString}` +if (!fs.existsSync(dir)){ + fs.mkdirSync(dir, { recursive: true }); +} -// const fs = require('fs'); -// const csv = require('csv'); -// const path = require('path'); -// const EOL = require('os').EOL; -// const { Pool } = require('pg'); -// const { to } = require('pg-copy-streams'); +// Create files +const vhersOutFile = path.join( __dirname, 'deleted', fileTimeString, 'vhers_audit_log.csv'); +const pinOutFile = path.join( __dirname, 'deleted', fileTimeString, 'pin_audit_log.csv'); +const vhersWriteStream = fs.createWriteStream(vhersOutFile); +const pinWriteStream = fs.createWriteStream(pinOutFile); -// const pool = new Pool({ -// host: process.env.DB_HOST, -// port: process.env.DB_PORT, -// user: process.env.DB_USERNAME, -// database: process.env.DB_NAME, -// password: process.env.DB_PASSWORD, -// }); +// Csv transforms +const parse = csv.parse(); -// const outFile = path.join( __dirname, 'vhers_audit_log.csv'); -// const writeStream = fs.createWriteStream(outFile); +const transform = csv.transform((row, cb) => { + result = row.join(',') + EOL; + cb(null, result); +}); + +const pinParse = csv.parse(); + +const pinTransform = csv.transform((row, cb) => { + result = row.join(',') + EOL; + cb(null, result); +}); -// const parse = csv.parse(); +// Copy functions +function async_vhers_output() { + return new Promise(function(resolve, reject) { + const vhersStream = vhersClient.query(to(`COPY (SELECT * FROM public.vhers_audit_log WHERE created_at < '${retainUntilString}') TO STDOUT WITH (FORMAT CSV, HEADER)`)); + vhersStream.pipe(parse).pipe(transform).pipe(vhersWriteStream); + vhersStream.on('end', () => { return resolve()}); + vhersStream.on('error', (err) => {return reject(err)}); + }) +} -// const transform = csv.transform((row, cb) => { -// row.push('NEW_COL'); -// result = row.join(',') + EOL; -// cb(null, result); -// }); +function async_pin_output() { + return new Promise(function(resolve, reject) { + const pinStream = pinClient.query(to(`COPY (SELECT * FROM public.pin_audit_log WHERE log_created_at < '${retainUntilString}') TO STDOUT WITH (FORMAT CSV, HEADER)`)); + pinStream.pipe(pinParse).pipe(pinTransform).pipe(pinWriteStream); + pinStream.on('end', () => { return resolve()}); + pinStream.on('error', (err) => {return reject(err)}); + }) +} -// pool.connect(function (err, client, done) { -// const stream = client.query(to(`COPY public.vhers_audit_log TO STDOUT WITH (FORMAT CSV, HEADER)`)) -// // var fileStream = fs.createReadStream('/deleted/vhers-audit-log.csv') -// // // fileStream.on('error', done) -// // stream.on('error', done) -// // stream.on('finish', done) -// // // fileStream.pipe(stream) -// stream.pipe(parse).pipe(transform).pipe(writeStream); -// stream.on('end', done) -// stream.on('error', done) -// }); +// Copy function IIFE (this gets arounds not allowing async functions outside of modules) +( async() => { + await vhersClient.connect(); + await pinClient.connect(); + const promises = []; + promises.push(async_vhers_output()); + promises.push(async_pin_output()); + Promise.all(promises).then(function AcceptHandler() { + delete_entries(); + }, function ErrorHandler(error) { + console.log(error); + process.exit(1); + }); + +})(); -// Delete the logs -pg('vhers_audit_log').where('created_at', '<', 
retainUntilString).delete().then( - () => { - pg('pin_audit_log').where('log_created_at', '<', retainUntilString).delete().then( - () => { - console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`); - process.exit(0); - }, - (err) => { - console.log(err); - process.exit(1); - } - ); - }, - (err) => { - console.log(err); - process.exit(1); - } -); \ No newline at end of file +// Entry deletion function +function delete_entries() { + pg('vhers_audit_log').where('created_at', '<', retainUntilString).delete().then( + () => { + pg('pin_audit_log').where('log_created_at', '<', retainUntilString).delete().then( + () => { + vhersClient.end(); + pinClient.end(); + console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`); + process.exit(0); + }, + (err) => { + console.log(err); + process.exit(1); + } + ); + }, + (err) => { + console.log(err); + process.exit(1); + } + ); +} \ No newline at end of file From 70272cef81dbcc87ad6a1377fba69613a589ac29 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Fri, 2 Feb 2024 10:29:59 -0700 Subject: [PATCH 14/29] Schedule for test purposes --- containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index c4fd121..282ae7c 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -68,9 +68,9 @@ objects: metadata: name: db-cleanup-cronjob spec: - schedule: "30 2 * * SUN" + # schedule: "30 2 * * SUN" # Different schedule for test purposes - # schedule: "*/5 * * * *" + schedule: "*/5 * * * *" startingDeadlineSeconds: 200 concurrencyPolicy: Forbid successfulJobsHistoryLimit: 3 From ac3851aa126a129b50083f5302eb69b1cee07bf4 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Fri, 2 Feb 2024 10:46:25 -0700 Subject: [PATCH 15/29] Fix postgres config --- containers/db_cleanup/cleanup.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index 24b216a..d85face 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -29,7 +29,7 @@ const pg = require('knex')({ const pinClient = new Client({ host: process.env.DB_HOST, port: process.env.DB_PORT, - user: process.env.DB_USERNAME, + user: process.env.DB_USER, database: process.env.DB_NAME, password: process.env.DB_PASSWORD, }); @@ -37,7 +37,7 @@ const pinClient = new Client({ const vhersClient = new Client({ host: process.env.DB_HOST, port: process.env.DB_PORT, - user: process.env.DB_USERNAME, + user: process.env.DB_USER, database: process.env.DB_NAME, password: process.env.DB_PASSWORD, }); From 4d9a6939162be36399aeacedecdb3c5526809653 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Fri, 2 Feb 2024 11:12:58 -0700 Subject: [PATCH 16/29] test persistent --- .../db_cleanup/openshift/templates/db-cleanup-dc.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index 282ae7c..b2e118e 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -68,9 +68,9 @@ objects: metadata: name: db-cleanup-cronjob spec: - # schedule: "30 2 * * SUN" + schedule: 
"30 2 * * SUN" # Different schedule for test purposes - schedule: "*/5 * * * *" + # schedule: "*/5 * * * *" startingDeadlineSeconds: 200 concurrencyPolicy: Forbid successfulJobsHistoryLimit: 3 @@ -87,6 +87,9 @@ objects: - name: db-cleanup-container image: "image-registry.openshift-image-registry.svc:5000/c82b4c-tools/db-cleanup-tools" imagePullPolicy: Always + volumeMounts: + - mountPath: "/deleted" + name: deleted env: # Use environment variables for Secret values - name: DB_NAME valueFrom: From dce797d8761dd9bb8dcc32bc285cbfb278b6e409 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Fri, 2 Feb 2024 11:31:37 -0700 Subject: [PATCH 17/29] change to check pvc access --- containers/db_cleanup/cleanup.js | 8 ++++++-- .../db_cleanup/openshift/templates/db-cleanup-dc.yaml | 6 +++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js index d85face..6a72f4a 100644 --- a/containers/db_cleanup/cleanup.js +++ b/containers/db_cleanup/cleanup.js @@ -111,6 +111,10 @@ function async_pin_output() { })(); +function sleep(ms) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + // Entry deletion function function delete_entries() { pg('vhers_audit_log').where('created_at', '<', retainUntilString).delete().then( @@ -119,8 +123,8 @@ function delete_entries() { () => { vhersClient.end(); pinClient.end(); - console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`); - process.exit(0); + sleep(240000).then(() => {console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`); + process.exit(0);}) }, (err) => { console.log(err); diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml index b2e118e..d851a27 100644 --- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml +++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml @@ -68,9 +68,9 @@ objects: metadata: name: db-cleanup-cronjob spec: - schedule: "30 2 * * SUN" + # schedule: "30 2 * * SUN" # Different schedule for test purposes - # schedule: "*/5 * * * *" + schedule: "*/5 * * * *" startingDeadlineSeconds: 200 concurrencyPolicy: Forbid successfulJobsHistoryLimit: 3 @@ -89,7 +89,7 @@ objects: imagePullPolicy: Always volumeMounts: - mountPath: "/deleted" - name: deleted + name: output env: # Use environment variables for Secret values - name: DB_NAME valueFrom: From 1fe0ed1b7f639ef54bd35b6a0acc4280b7569dc0 Mon Sep 17 00:00:00 2001 From: hannah-macdonald1 Date: Fri, 2 Feb 2024 11:41:02 -0700 Subject: [PATCH 18/29] rebuild? 
---
 containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
index d851a27..e5d68c1 100644
--- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
+++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
@@ -70,6 +70,7 @@ objects:
     spec:
       # schedule: "30 2 * * SUN"
       # Different schedule for test purposes
+      # change for rebuild
       schedule: "*/5 * * * *"
       startingDeadlineSeconds: 200
       concurrencyPolicy: Forbid

From 7003170dca655bd567d1b99c29875f8b039b78cf Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 11:45:43 -0700
Subject: [PATCH 19/29] :/

---
 .github/workflows/.merge.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml
index 34ddf7f..242968d 100644
--- a/.github/workflows/.merge.yml
+++ b/.github/workflows/.merge.yml
@@ -92,6 +92,7 @@ jobs:
       issues: write
     runs-on: ubuntu-22.04
     strategy:
+      fail-fast: false
       matrix:
         name: [vhers-virus-scan, db-cleanup]
         include:

From b7b6c6df6b2104d81604347ade40db4756478dec Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 11:56:08 -0700
Subject: [PATCH 20/29] save to pvc?

---
 containers/db_cleanup/Dockerfile | 1 +
 containers/db_cleanup/cleanup.js | 7 ++++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/containers/db_cleanup/Dockerfile b/containers/db_cleanup/Dockerfile
index 60dba03..ca480f0 100644
--- a/containers/db_cleanup/Dockerfile
+++ b/containers/db_cleanup/Dockerfile
@@ -5,6 +5,7 @@ WORKDIR /app
 
 # Ensure that the user can access all application files
 RUN chmod -R g+rwX /app
+RUN chmod -R g+rwX /deleted
 
 # Install dependencies based on the preferred package manager
 COPY package.json package-lock.json* ./
diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js
index 6a72f4a..bba5db9 100644
--- a/containers/db_cleanup/cleanup.js
+++ b/containers/db_cleanup/cleanup.js
@@ -50,14 +50,15 @@
 const fileTimeString = now.format('YYYY-MM-DD-HH-mm-ss');
 const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ');
 
 // Create directory for entries
-const dir = `./deleted/${fileTimeString}`
+const dir = path.join(__dirname, '../', `deleted`,`${fileTimeString}`)
 if (!fs.existsSync(dir)){
     fs.mkdirSync(dir, { recursive: true });
 }
 
 // Create files
-const vhersOutFile = path.join( __dirname, 'deleted', fileTimeString, 'vhers_audit_log.csv');
-const pinOutFile = path.join( __dirname, 'deleted', fileTimeString, 'pin_audit_log.csv');
+const vhersOutFile = path.join( __dirname, '../', 'deleted', fileTimeString, 'vhers_audit_log.csv');
+const pinOutFile = path.join( __dirname, '../', 'deleted', fileTimeString, 'pin_audit_log.csv');
+
 const vhersWriteStream = fs.createWriteStream(vhersOutFile);
 const pinWriteStream = fs.createWriteStream(pinOutFile);

From ce927d72e3509fccd1b7cdf8bb33968c260ebb3b Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 11:59:26 -0700
Subject: [PATCH 21/29] dockerfile change

---
 containers/db_cleanup/Dockerfile | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/containers/db_cleanup/Dockerfile b/containers/db_cleanup/Dockerfile
index ca480f0..98dd464 100644
--- a/containers/db_cleanup/Dockerfile
+++ b/containers/db_cleanup/Dockerfile
@@ -1,11 +1,13 @@
 # Install dependencies only when needed
 FROM registry.access.redhat.com/ubi8/nodejs-16 AS deps
 USER 0
+WORKDIR /deleted
+RUN chmod -R g+rwX /deleted
 WORKDIR /app
 
 # Ensure that the user can access all application files
 RUN chmod -R g+rwX /app
-RUN chmod -R g+rwX /deleted
+
 
 # Install dependencies based on the preferred package manager
 COPY package.json package-lock.json* ./

From f3739eb54419168758d4d58e3e9a817dc39ce93b Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 12:04:58 -0700
Subject: [PATCH 22/29] change cron schedule back to what it should be

---
 containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
index e5d68c1..4868eb9 100644
--- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
+++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
@@ -68,10 +68,9 @@ objects:
     metadata:
       name: db-cleanup-cronjob
     spec:
-      # schedule: "30 2 * * SUN"
+      schedule: "30 2 * * SUN"
       # Different schedule for test purposes
-      # change for rebuild
-      schedule: "*/5 * * * *"
+      # schedule: "*/5 * * * *"
       startingDeadlineSeconds: 200
       concurrencyPolicy: Forbid
       successfulJobsHistoryLimit: 3

From 3fabd8c2e85f3094a7542b8be666771a7c5bda50 Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 12:05:28 -0700
Subject: [PATCH 23/29] remove tmp change to allow build on push

---
 .github/workflows/.merge.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml
index 242968d..a27ebf6 100644
--- a/.github/workflows/.merge.yml
+++ b/.github/workflows/.merge.yml
@@ -5,9 +5,6 @@ on:
     branches: [dev]
     types: [closed]
   # temp change to force a deployment
-  push:
-    branches:
-      - "*"
   workflow_dispatch:
 
 concurrency:

From 8eeae15ff26faefeeb137b4c906213f7adfbdd37 Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 12:21:58 -0700
Subject: [PATCH 24/29] deployment config change

---
 .../db_cleanup/openshift/templates/db-cleanup-dc.yaml | 10 +++++-----
 openshift.deploy.yml                                  | 10 +++++-----
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
index 4868eb9..d7955d8 100644
--- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
+++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
@@ -95,27 +95,27 @@ objects:
               valueFrom:
                 secretKeyRef:
                   name: patroni-ha-postgres-instance
-                  key: db-name-dev
+                  key: app-db-name-prod
             - name: DB_HOST
               valueFrom:
                 secretKeyRef:
                   name: patroni-ha-postgres-instance
-                  key: db-hostname-dev
+                  key: app-db-hostname-prod
             - name: DB_PORT
              valueFrom:
                 secretKeyRef:
                   name: patroni-ha-postgres-instance
-                  key: db-port-dev
+                  key: app-db-port-prod
             - name: DB_USER
               valueFrom:
                 secretKeyRef:
                   name: patroni-ha-postgres-instance
-                  key: superuser-username-dev
+                  key: superuser-username-prod
             - name: DB_PASSWORD
               valueFrom:
                 secretKeyRef:
                   name: patroni-ha-postgres-instance
-                  key: superuser-password-dev
+                  key: superuser-password-prod
             - name: RENTENTION_MONTHS
               valueFrom:
                 secretKeyRef:
diff --git a/openshift.deploy.yml b/openshift.deploy.yml
index 59716aa..33bc2c3 100644
--- a/openshift.deploy.yml
+++ b/openshift.deploy.yml
@@ -141,27 +141,27 @@ objects:
               valueFrom:
                 secretKeyRef:
                   name: patroni-ha-postgres-instance
-                  key: db-hostname-dev
+                  key: app-db-hostname-prod
            - name: DB_PORT
              valueFrom:
                secretKeyRef:
                  name: patroni-ha-postgres-instance
-                 key: db-port-dev
+                 key: app-db-port-prod
            - name: DB_NAME
              valueFrom:
                secretKeyRef:
                  name: patroni-ha-postgres-instance
-                 key: db-name-dev
+                 key: app-db-name-prod
            - name: DB_USERNAME
              valueFrom:
                secretKeyRef:
                  name: patroni-ha-postgres-instance
-                 key: app-db-username-dev
+                 key: app-db-username-prod
            - name: DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: patroni-ha-postgres-instance
-                 key: app-db-password-dev
+                 key: app-db-password-prod
           ports:
             - containerPort: 3500
               protocol: TCP

From fb71cc2c0c575b702b821f4c4c0746c3b2ed8b94 Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 12:22:25 -0700
Subject: [PATCH 25/29] merge change

---
 .github/workflows/.merge.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml
index a27ebf6..20c759c 100644
--- a/.github/workflows/.merge.yml
+++ b/.github/workflows/.merge.yml
@@ -4,6 +4,8 @@ on:
   pull_request:
     branches: [dev]
     types: [closed]
+  push:
+    branches: "*"
   # temp change to force a deployment
   workflow_dispatch:
 

From fb84d7ef6ca4de6e030fefc27538c3eb42bdef31 Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 12:31:26 -0700
Subject: [PATCH 26/29] test schedule

---
 containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
index d7955d8..8750f28 100644
--- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
+++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
@@ -68,9 +68,9 @@ objects:
     metadata:
       name: db-cleanup-cronjob
     spec:
-      schedule: "30 2 * * SUN"
+      # schedule: "30 2 * * SUN"
       # Different schedule for test purposes
-      # schedule: "*/5 * * * *"
+      schedule: "*/5 * * * *"
       startingDeadlineSeconds: 200
       concurrencyPolicy: Forbid
       successfulJobsHistoryLimit: 3

From d31d2f1975ed5d004df8e1e3a1e1d342a2a14134 Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 13:08:32 -0700
Subject: [PATCH 27/29] change

---
 openshift.deploy.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openshift.deploy.yml b/openshift.deploy.yml
index 33bc2c3..6aa82af 100644
--- a/openshift.deploy.yml
+++ b/openshift.deploy.yml
@@ -24,7 +24,7 @@ parameters:
   - name: CPU_REQUEST
     value: "100m"
   - name: CPU_LIMIT
-    value: "1100m"
+    value: "500m"
   - name: MEMORY_REQUEST
     value: "500M"
   - name: MEMORY_LIMIT

From 4b5d73994b0d84e0609c1f8ee544aa417f13806d Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 13:13:05 -0700
Subject: [PATCH 28/29] change promote

---
 openshift.deploy.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openshift.deploy.yml b/openshift.deploy.yml
index 6aa82af..8e661dd 100644
--- a/openshift.deploy.yml
+++ b/openshift.deploy.yml
@@ -39,7 +39,7 @@ parameters:
     value: bcgov
   - name: PROMOTE
     description: Image (namespace/name:tag) to promote/import
-    value: bcgov/vhers-virus-scan-tools/vhers-virus-scan:latest
+    value: bcgov/bc-emli-vhers-scan/vhers-virus-scan:latest
 objects:
   - kind: ImageStream
     apiVersion: v1

From 6ca6479794130c16977eeb3ab815ae7ed5d181cb Mon Sep 17 00:00:00 2001
From: hannah-macdonald1
Date: Fri, 2 Feb 2024 13:32:47 -0700
Subject: [PATCH 29/29] final changes

---
 .github/workflows/.merge.yml                                 | 3 ---
 containers/db_cleanup/cleanup.js                             | 4 ++--
 containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml | 4 ++--
 3 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/.merge.yml b/.github/workflows/.merge.yml
index 20c759c..e624c16 100644
--- a/.github/workflows/.merge.yml
+++ b/.github/workflows/.merge.yml
@@ -4,9 +4,6 @@ on:
   pull_request:
     branches: [dev]
     types: [closed]
-  push:
-    branches: "*"
-  # temp change to force a deployment
   workflow_dispatch:
 
 concurrency:
diff --git a/containers/db_cleanup/cleanup.js b/containers/db_cleanup/cleanup.js
index bba5db9..94609f3 100644
--- a/containers/db_cleanup/cleanup.js
+++ b/containers/db_cleanup/cleanup.js
@@ -124,8 +124,8 @@ function delete_entries() {
     () => {
       vhersClient.end();
       pinClient.end();
-      sleep(240000).then(() => {console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`);
-      process.exit(0);})
+      console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`);
+      process.exit(0);
     },
     (err) => {
       console.log(err);
diff --git a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
index 8750f28..d7955d8 100644
--- a/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
+++ b/containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
@@ -68,9 +68,9 @@ objects:
    metadata:
      name: db-cleanup-cronjob
    spec:
-      # schedule: "30 2 * * SUN"
+      schedule: "30 2 * * SUN"
       # Different schedule for test purposes
-      schedule: "*/5 * * * *"
+      # schedule: "*/5 * * * *"
       startingDeadlineSeconds: 200
       concurrencyPolicy: Forbid
       successfulJobsHistoryLimit: 3
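
For reference, a minimal standalone sketch of the retention-cutoff and delete flow that cleanup.js converges on by the end of this series. Table, column, and env var names are taken from the repo; the CSV export via pg-copy-streams is omitted, and the fallback of 7 months is an assumption, so treat this as an illustration rather than the shipped script.

// sketch.js -- hypothetical, trimmed-down version of the cleanup flow (not part of the patch series)
require('dotenv').config();
const dayjs = require('dayjs');

// DB connection, using the same env vars the repo's cleanup.js reads
const knex = require('knex')({
  client: 'pg',
  connection: {
    host: process.env.DB_HOST,
    port: process.env.DB_PORT,
    user: process.env.DB_USERNAME,
    database: process.env.DB_NAME,
    password: process.env.DB_PASSWORD,
  },
});

// Keep everything newer than RENTENTION_MONTHS months (falling back to 7 -- an assumption)
const months = Math.abs(parseInt(process.env.RENTENTION_MONTHS, 10)) || 7;
const cutoff = dayjs().subtract(months, 'month').format('YYYY-MM-DD HH:mm:ss.SSS ZZ');

// Delete expired rows from both audit tables, then exit with a status the CronJob can report
Promise.all([
  knex('vhers_audit_log').where('created_at', '<', cutoff).del(),
  knex('pin_audit_log').where('log_created_at', '<', cutoff).del(),
])
  .then(() => {
    console.log(`Deleted audit log entries prior to ${cutoff}`);
    process.exit(0);
  })
  .catch((err) => {
    console.log(err);
    process.exit(1);
  });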