Skip to content

Commit

Permalink
Merge pull request #38 from bcgov/deletion-cronjob
Browse files Browse the repository at this point in the history
Deletion cronjob
  • Loading branch information
hannah-macdonald1 authored Feb 2, 2024
2 parents dd229e7 + 6ca6479 commit 956d2d8
Show file tree
Hide file tree
Showing 8 changed files with 873 additions and 9 deletions.
12 changes: 10 additions & 2 deletions .github/workflows/.merge.yml
Original file line number Diff line number Diff line change
Expand Up @@ -59,12 +59,16 @@ jobs:
packages: write
strategy:
matrix:
package: [vhers-virus-scan]
package: [vhers-virus-scan, db-cleanup]
include:
- package: vhers-virus-scan
build_context: ./
triggers: ('vhers-virus-scan/')
build_file: Dockerfile
- package: db-cleanup
build_context: ./containers/db_cleanup
triggers: ('vhers-virus-scan/containers/db_cleanup')
build_file: Dockerfile
timeout-minutes: 10
steps:
- uses: bcgov-nr/[email protected]
Expand All @@ -84,12 +88,16 @@ jobs:
issues: write
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
name: [vhers-virus-scan]
name: [vhers-virus-scan, db-cleanup]
include:
- name: vhers-virus-scan
file: ./openshift.deploy.yml
overwrite: true
- name: db-cleanup
file: ./containers/db_cleanup/openshift/templates/db-cleanup-dc.yaml
overwrite: true

steps:
- uses: bcgov-nr/[email protected]
Expand Down
22 changes: 22 additions & 0 deletions containers/db_cleanup/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Install dependencies only when needed
FROM registry.access.redhat.com/ubi8/nodejs-16 AS deps
# Build steps run as root so chmod/npm can write everywhere
USER 0
# /deleted is the export target of cleanup.js (it writes to ../deleted relative
# to the app dir, presumably resolving to /deleted — confirm against cleanup.js);
# make it group-writable so OpenShift's arbitrary-UID user (root group) can write
WORKDIR /deleted
RUN chmod -R g+rwX /deleted
WORKDIR /app

# Ensure that the user can access all application files
# NOTE(review): this chmod runs BEFORE the COPY steps below, so files added
# later are not re-chmodded — verify the runtime UID can still read them
RUN chmod -R g+rwX /app


# Install dependencies based on the preferred package manager
COPY package.json package-lock.json* ./
RUN npm ci

COPY . .

# Drop to a non-root UID for runtime (OpenShift convention)
USER 1001

# No port is needed — this image runs as a cronjob, not a server
# EXPOSE 3700

# ENV PORT 3700
141 changes: 141 additions & 0 deletions containers/db_cleanup/cleanup.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
require('dotenv').config();
const fs = require('fs');
const csv = require('csv');
const path = require('path');
const EOL = require('os').EOL;
const { Client } = require('pg');
const { to } = require('pg-copy-streams');
const dayjs = require('dayjs');
const utc = require('dayjs/plugin/utc');
const timezone = require('dayjs/plugin/timezone');
dayjs.extend(utc);
dayjs.extend(timezone);

// Set the default timezone for date objects to the system timezone
dayjs.tz.setDefault(dayjs.tz.guess());

// NOTE: Since we are using the postgres COPY command, this requires superadmin privileges
// DB configuration: knex (`pg`) runs the DELETE statements; the two raw pg
// clients below run the streaming COPY ... TO STDOUT exports, one per table.
const pg = require('knex')({
  client: 'pg',
  connection: {
    host: process.env.DB_HOST,
    port: process.env.DB_PORT,
    user: process.env.DB_USER,
    database: process.env.DB_NAME,
    password: process.env.DB_PASSWORD,
  },
});

const pinClient = new Client({
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  user: process.env.DB_USER,
  database: process.env.DB_NAME,
  password: process.env.DB_PASSWORD,
});

const vhersClient = new Client({
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  user: process.env.DB_USER,
  database: process.env.DB_NAME,
  password: process.env.DB_PASSWORD,
});

// Get timestamps for now and the deletion cutoff.
// The original env var name is misspelled ("RENTENTION"); keep honoring it for
// backward compatibility, but also accept the correctly spelled name.
const retentionIntervalMonths = Math.abs(
  Number.parseInt(process.env.RENTENTION_MONTHS ?? process.env.RETENTION_MONTHS, 10)
);
// Fail fast with a clear message instead of letting an invalid value produce
// "Invalid Date" strings in the SQL below.
if (!Number.isFinite(retentionIntervalMonths)) {
  console.log('RENTENTION_MONTHS is not set to a valid integer');
  process.exit(1);
}
const now = dayjs();
const retainUntil = now.subtract(retentionIntervalMonths, 'month');
const fileTimeString = now.format('YYYY-MM-DD-HH-mm-ss');
const retainUntilString = retainUntil.format('YYYY-MM-DD HH:mm:ss.SSS ZZ');

// Create a per-run directory for the exported entries: ../deleted/<timestamp>/
const dir = path.join(__dirname, '../', 'deleted', `${fileTimeString}`);
if (!fs.existsSync(dir)) {
  fs.mkdirSync(dir, { recursive: true });
}

// Create one CSV output file per audited table
const vhersOutFile = path.join(__dirname, '../', 'deleted', fileTimeString, 'vhers_audit_log.csv');
const pinOutFile = path.join(__dirname, '../', 'deleted', fileTimeString, 'pin_audit_log.csv');

const vhersWriteStream = fs.createWriteStream(vhersOutFile);
const pinWriteStream = fs.createWriteStream(pinOutFile);

// Csv transforms: re-serialize each parsed row as a comma-joined line
const parse = csv.parse();

const transform = csv.transform((row, cb) => {
  // `const` added: `result` was an accidental implicit global
  const result = row.join(',') + EOL;
  cb(null, result);
});

const pinParse = csv.parse();

const pinTransform = csv.transform((row, cb) => {
  // `const` added: `result` was an accidental implicit global
  const result = row.join(',') + EOL;
  cb(null, result);
});

// Copy functions
/**
 * Streams vhers_audit_log rows older than the retention cutoff into
 * vhers_audit_log.csv via postgres COPY ... TO STDOUT.
 * @returns {Promise<void>} resolves once the CSV file is fully flushed
 */
function async_vhers_output() {
  return new Promise(function (resolve, reject) {
    const vhersStream = vhersClient.query(to(`COPY (SELECT * FROM public.vhers_audit_log WHERE created_at < '${retainUntilString}') TO STDOUT WITH (FORMAT CSV, HEADER)`));
    vhersStream.pipe(parse).pipe(transform).pipe(vhersWriteStream);
    // Resolve on the WRITE side finishing (not the read side's 'end'), so the
    // export file is guaranteed flushed before the rows are deleted; surface
    // errors from every stage so the promise cannot hang on a mid-pipe failure.
    vhersWriteStream.on('finish', () => resolve());
    vhersStream.on('error', (err) => reject(err));
    parse.on('error', (err) => reject(err));
    transform.on('error', (err) => reject(err));
    vhersWriteStream.on('error', (err) => reject(err));
  });
}

/**
 * Streams pin_audit_log rows older than the retention cutoff into
 * pin_audit_log.csv via postgres COPY ... TO STDOUT.
 * @returns {Promise<void>} resolves once the CSV file is fully flushed
 */
function async_pin_output() {
  return new Promise(function (resolve, reject) {
    const pinStream = pinClient.query(to(`COPY (SELECT * FROM public.pin_audit_log WHERE log_created_at < '${retainUntilString}') TO STDOUT WITH (FORMAT CSV, HEADER)`));
    pinStream.pipe(pinParse).pipe(pinTransform).pipe(pinWriteStream);
    // Resolve on the WRITE side finishing (not the read side's 'end'), so the
    // export file is guaranteed flushed before the rows are deleted; surface
    // errors from every stage so the promise cannot hang on a mid-pipe failure.
    pinWriteStream.on('finish', () => resolve());
    pinStream.on('error', (err) => reject(err));
    pinParse.on('error', (err) => reject(err));
    pinTransform.on('error', (err) => reject(err));
    pinWriteStream.on('error', (err) => reject(err));
  });
}

// Main IIFE (CommonJS scripts have no top-level await, so the async work is
// wrapped in an immediately-invoked function). Connects both COPY clients,
// exports both tables to CSV in parallel, then deletes the exported rows.
// Any failure — including connect() failures, which previously escaped as
// unhandled rejections — is logged and exits non-zero WITHOUT deleting.
(async () => {
  try {
    await vhersClient.connect();
    await pinClient.connect();
    await Promise.all([async_vhers_output(), async_pin_output()]);
    delete_entries();
  } catch (error) {
    console.log(error);
    process.exit(1);
  }
})();

/**
 * Pause asynchronously for the given duration.
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>} resolves after roughly `ms` milliseconds
 */
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

// Entry deletion function: deletes the audit rows that were just exported
// (everything older than the retention cutoff), in the same order as the
// original nested-promise version (vhers first, then pin). On success the two
// COPY clients are closed — now awaited, so process.exit(0) cannot cut the
// shutdown short — and the process exits 0; any DB error logs and exits 1,
// leaving the CSV exports on disk.
async function delete_entries() {
  try {
    await pg('vhers_audit_log').where('created_at', '<', retainUntilString).delete();
    await pg('pin_audit_log').where('log_created_at', '<', retainUntilString).delete();
    await vhersClient.end();
    await pinClient.end();
    console.log(`Successfully deleted audit log entries prior to ${retainUntilString}`);
    process.exit(0);
  } catch (err) {
    console.log(err);
    process.exit(1);
  }
}
86 changes: 86 additions & 0 deletions containers/db_cleanup/openshift/templates/db-cleanup-bc.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# OpenShift Template: ImageStream + BuildConfig for the db-cleanup image.
# (Re-indented: the structure was flattened in the source paste.)
kind: Template
apiVersion: template.openshift.io/v1
metadata:
  name: db-cleanup-build
parameters:
  - name: NAME
    displayName: Name
    description: The name assigned to all of the objects defined in this template.
    value: db-cleanup
    required: true
  - name: GIT_SOURCE_URL
    displayName: GIT Source Repo URL
    description: A GIT URL to your source code.
    value: "https://github.com/bcgov/bc-emli-vhers-scan.git"
    required: true
  - name: GIT_REF
    displayName: Git Reference
    description: The git reference or branch.
    value: dev
    required: true
  - name: SOURCE_CONTEXT_DIR
    displayName: Source Context Directory
    description: The source context directory.
    value: containers/db_cleanup
  - name: NAME_SPACE
    displayName: Namespace for source image
    value: c82b4c-tools
    required: true
  - name: CPU_REQUEST
    value: "100m"
  - name: CPU_LIMIT
    value: "300m"
  - name: MEMORY_REQUEST
    value: "500Mi"
  - name: MEMORY_LIMIT
    value: "800Mi"
objects:
  - kind: ImageStream
    apiVersion: image.openshift.io/v1
    metadata:
      name: "${NAME}"

  - kind: BuildConfig
    apiVersion: build.openshift.io/v1
    metadata:
      name: "${NAME}-build"
      labels:
        app: "${NAME}"
    spec:
      runPolicy: Serial
      completionDeadlineSeconds: 1800
      source:
        type: Git
        git:
          ref: "${GIT_REF}"
          uri: "${GIT_SOURCE_URL}"
        contextDir: "${SOURCE_CONTEXT_DIR}"
        # Red Hat entitlement/subscription material mounted into the build
        secrets:
          - secret:
              name: platform-services-controlled-etc-pki-entitlement
            destinationDir: etc-pki-entitlement
        configMaps:
          - configMap:
              name: platform-services-controlled-rhsm-conf
            destinationDir: rhsm-conf
          - configMap:
              name: platform-services-controlled-rhsm-ca
            destinationDir: rhsm-ca
      strategy:
        type: Docker
        dockerStrategy:
          # NOTE(review): overrides the Dockerfile base with ubi:latest from the
          # tools namespace, while the Dockerfile declares ubi8/nodejs-16 —
          # confirm this override is intentional
          from:
            kind: ImageStreamTag
            name: "ubi:latest"
            namespace: "${NAME_SPACE}"
      output:
        to:
          kind: ImageStreamTag
          name: "${NAME}:latest"
      resources:
        requests:
          cpu: ${CPU_REQUEST}
          memory: ${MEMORY_REQUEST}
        limits:
          cpu: ${CPU_LIMIT}
          memory: ${MEMORY_LIMIT}
Loading

0 comments on commit 956d2d8

Please sign in to comment.