Merge pull request #779 from bcgov/test
Create Latest Release
ikethecoder authored Mar 13, 2023
2 parents 2b17589 + c227da7 commit 0844132
Showing 32 changed files with 669 additions and 941 deletions.
@@ -18,7 +18,7 @@ record:
- GatewayConfig.Publish
- Content.Publish
- CredentialIssuer.Admin
owner: [email protected]
owner: api-owner@local
environmentDetails:
- environment: prod
issuerUrl: '{OIDC_ISSUER}'
2 changes: 1 addition & 1 deletion feeds/ckan/index.js
@@ -47,7 +47,7 @@ async function scopedSync(
loadDatasetProducer(xfer, scopedDir, destinationUrl)
);

fs.rmdirSync(scopedDir, { recursive: true });
fs.rmSync(scopedDir, { recursive: true });
}

async function syncOrgs({ url, workingPath, destinationUrl }) {
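
The fs.rmdirSync → fs.rmSync swap here (repeated in feeds/kong/index.js below) tracks a Node.js deprecation: recursive removal via fs.rmdirSync is deprecated (DEP0147), and fs.rmSync has been the supported replacement since Node 14.14. A minimal sketch of the new call, using a hypothetical scratch path:

const fs = require('fs');

// Remove a scratch directory and everything under it.
// 'force: true' is optional and suppresses the error if the path is already
// gone; the feeder code above passes only 'recursive: true'.
fs.rmSync('/tmp/scoped-dir', { recursive: true, force: true });
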
63 changes: 24 additions & 39 deletions feeds/kong/index.js
@@ -70,6 +70,8 @@ async function scopedSyncByNamespace(
loadGroupsProducer(xfer, destinationUrl, '/feed/GatewayGroup')
);

xfer.resultCollector().output();

// remove any GatewayService or GatewayRoutes that no longer exist in Kong
const destination = portal(destinationUrl);

@@ -94,7 +96,7 @@ async function scopedSyncByNamespace(
}
}

fs.rmdirSync(scopedDir, { recursive: true });
fs.rmSync(scopedDir, { recursive: true });
}

async function scopedSyncByConsumer(
@@ -122,7 +124,9 @@ async function scopedSyncByConsumer(
)
);

fs.rmdirSync(scopedDir, { recursive: true });
xfer.resultCollector().output();

fs.rmSync(scopedDir, { recursive: true });
}

async function sync({ url, workingPath, destinationUrl }) {
@@ -166,9 +170,13 @@ async function sync({ url, workingPath, destinationUrl }) {
'/feed/GatewayConsumer'
)
);

await xfer.concurrentWork(
loadGroupsProducer(xfer, destinationUrl, '/feed/GatewayGroup')
);

xfer.resultCollector().output();

//await xfer.concurrentWork(loadProducer(xfer, destinationUrl, 'gw-products', 'name', 'product', '/feed/Product'))
//await xfer.concurrentWork(loadServiceAccessProducer(xfer, destinationUrl, 'gw-consumers', '/feed/ServiceAccess'))
}
@@ -181,26 +189,11 @@ function loadProducer(xfer, destinationUrl, file, name, type, feedPath) {
type == 'consumer' ? xfer.get_json_content('gw-acls')['data'] : null;
let index = 0;

log.info('[producer] %s : %d records', file, items.length);

const results = {
'no-change': 0,
created: 0,
'created-failed': 0,
updated: 0,
deleted: 0,
'updated-failed': 0,
'deleted-failed': 0,
};
log.debug('[producer] %s : %d records', file, items.length);

return () => {
if (index == items.length) {
Object.keys(results)
.filter((r) => results[r] != 0)
.forEach((r) => {
log.info('[%s] %d', String(r).padStart(15, ' '), results[r]);
});
log.info('Finished producing ' + index + ' records.');
log.info('[producer] %s : Finished %d records', file, items.length);
return null;
}
const item = items[index];
@@ -244,12 +237,14 @@ function loadProducer(xfer, destinationUrl, file, name, type, feedPath) {
return destination
.fireAndForget(feedPath, item)
.then((result) => {
results[result['result']]++;
xfer.resultCollector().inc(result['result']);

log.debug('%s -> %s OK', file, result);
})
.catch((err) => {
log.error(`[${nm}] ERR ${err}`);
xfer.resultCollector().inc('exception');
log.error('%s', err);
log.error('%j', item);
});
};
}
@@ -279,28 +274,17 @@ function loadGroupsProducer(xfer, destinationUrl, feedPath) {
});
});

log.info('[loadGroupsProducer] (%s) %d records', feedPath, items.length);
log.debug('[loadGroupsProducer] (%s) %d records', feedPath, items.length);

let index = 0;

const results = {
'no-change': 0,
created: 0,
'created-failed': 0,
updated: 0,
deleted: 0,
'updated-failed': 0,
'deleted-failed': 0,
};

return () => {
if (index == items.length) {
Object.keys(results)
.filter((r) => results[r] != 0)
.forEach((r) => {
log.info('[%s] %d', String(r).padStart(15, ' '), results[r]);
});
log.info('Finished producing ' + index + ' records.');
log.info(
'[loadGroupsProducer] %s : Finished %d records',
feedPath,
items.length
);
return null;
}
const item = items[index];
@@ -316,10 +300,11 @@ return destination
return destination
.fireAndForget(feedPath, item)
.then((result) => {
results[result['result']]++;
xfer.resultCollector().inc(result['result']);
log.debug('%s -> %s OK', feedPath, result);
})
.catch((err) => {
xfer.resultCollector().inc('exception');
log.error(`[${nm}] ERR ${err}`);
});
};
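
Both loadProducer and loadGroupsProducer hand xfer.concurrentWork (backed by es6-promise-pool in feeds/utils/transfers.js) a zero-argument producer function: each call yields one fireAndForget promise, and returning null tells the pool the records are exhausted. A stripped-down sketch of that shape, with hypothetical names:

// Generic form of the producers above: the promise pool calls this
// closure repeatedly until it returns null.
function makeProducer(items, send) {
  let index = 0;
  return () => {
    if (index === items.length) {
      return null; // no more work for the pool
    }
    const item = items[index];
    index++;
    return send(item); // a promise the pool awaits before asking again
  };
}
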
7 changes: 6 additions & 1 deletion feeds/logger.js
@@ -1,4 +1,5 @@
const winston = require('winston');
const { createLogger, format, transports } = require('winston');

const enumerateErrorFormat = winston.format((info) => {
if (info instanceof Error) {
@@ -14,12 +15,16 @@ function Logger(category) {
level: process.env.LOG_LEVEL || 'debug',
format: winston.format.combine(
enumerateErrorFormat(),
format.timestamp({
format: 'YYYY-MM-DD HH:mm:ss',
}),
process.env.NODE_ENV === 'production'
? winston.format.uncolorize()
: winston.format.colorize(),
winston.format.splat(),
winston.format.printf(
({ level, message, stack }) => `${level}: [${category}] ${message}`
({ timestamp, level, message, stack }) =>
`${timestamp} ${level}: [${category}] ${message}`
)
),
transports: [
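
With the timestamp formatter added ahead of the level, each entry now carries a YYYY-MM-DD HH:mm:ss prefix. Illustrative output only (not captured from a real run), using categories and message formats visible elsewhere in this diff:

2023-03-13 09:15:02 debug: [utils.xfer] All promises fulfilled
2023-03-13 09:15:02 info: [results] [        created] 12
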
18 changes: 15 additions & 3 deletions feeds/prometheus/index.js
@@ -91,6 +91,8 @@ async function syncQueryRanges(
await xfer.concurrentWork(
producer(xfer, 'query_range', params.numDays, destinationUrl)
);

xfer.resultCollector().output();
}

async function syncQueries(
@@ -126,6 +128,8 @@ async function syncQueries(
await xfer.concurrentWork(
producer(xfer, 'query', params.numDays, destinationUrl)
);

xfer.resultCollector().output();
}

function producer(xfer, queryType, numDays, destinationUrl) {
@@ -139,7 +143,7 @@ function producer(xfer, queryType, numDays, destinationUrl) {
const results = Array.isArray(json['data'])
? json['data'][0]['result']
: json['data']['result'];
log.info(
log.debug(
'[producer] %s %s : %d records',
target,
_query.id,
@@ -180,8 +184,16 @@

return destination
.fireAndForget('/feed/Metric', item.metric)
.then((result) => log.debug(`[${name}] OK`, result))
.catch((err) => log.error(`[${name}] ERR ${err}`));
.then((result) => {
xfer.resultCollector().inc(result['result']);

log.debug('%s -> %s OK', name, result);
})
.catch((err) => {
xfer.resultCollector().inc('exception');
log.error('%s', err);
log.error('%j', item);
});
};
}

7 changes: 5 additions & 2 deletions feeds/utils/portal.js
@@ -1,5 +1,8 @@
const fs = require('fs');
const fetch = require('node-fetch');
const { Logger } = require('../logger');

const log = Logger('utils.portal');

const checkStatus = require('./checkStatus').checkStatus;

@@ -25,7 +28,7 @@ function portal(baseUrl, logFeeds = _logFeeds) {
let retry = attempts;
while (retry <= attempts) {
retry != attempts &&
console.log('Retrying [' + (attempts - retry) + '] ' + url);
log.error('Retrying [' + (attempts - retry) + '] ' + url);
try {
return await fetch(baseUrl + url, {
method: 'put',
@@ -47,7 +50,7 @@ function portal(baseUrl, logFeeds = _logFeeds) {
let retry = attempts;
while (retry <= attempts) {
retry != attempts &&
console.log('Retrying [' + (attempts - retry) + '] ' + url);
log.error('Retrying [' + (attempts - retry) + '] ' + url);
try {
return await fetch(baseUrl + url, {
method: 'delete',
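
Besides swapping console.log for the shared winston logger, note the idiom on the retry lines: retry != attempts && log.error(...) short-circuits, so nothing is logged on the first attempt and the notice appears only on actual retries. An equivalent, more explicit form:

if (retry !== attempts) {
  log.error('Retrying [' + (attempts - retry) + '] ' + url);
}
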
35 changes: 35 additions & 0 deletions feeds/utils/results.js
@@ -0,0 +1,35 @@
const { Logger } = require('../logger');

const log = Logger('results');

class ResultsService {
init() {
this.results = {
'no-change': 0,
exception: 0,
created: 0,
'created-failed': 0,
updated: 0,
deleted: 0,
'updated-failed': 0,
'deleted-failed': 0,
};
}

inc(result) {
this.results[result]++;
}

output() {
const { results } = this;
Object.keys(results)
.filter((r) => results[r] != 0)
.forEach((r) => {
log.info('[%s] %d', String(r).padStart(15, ' '), results[r]);
});
}
}

module.exports = {
ResultsService,
};
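
ResultsService consolidates the per-producer tallies that feeds/kong/index.js previously kept in local results objects: init() zeroes the known outcome buckets, inc() bumps one bucket per completed record, and output() logs only the buckets that are non-zero. Used standalone it looks like this (illustrative):

const { ResultsService } = require('./results');

const collector = new ResultsService();
collector.init();

collector.inc('created');
collector.inc('created');
collector.inc('no-change');

// Logs only the non-zero buckets, right-aligned to 15 characters:
//   [        created] 2
//   [      no-change] 1
collector.output();
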
8 changes: 7 additions & 1 deletion feeds/utils/transfers.js
@@ -5,13 +5,19 @@ const PromisePool = require('es6-promise-pool');
const { checkStatus } = require('./checkStatus');
const { Logger } = require('../logger');
const stringify = require('json-stable-stringify');
const { ResultsService } = require('./results');

const log = Logger('utils.xfer');

function transfers(workingPath, baseUrl, exceptions) {
fs.mkdirSync(workingPath, { recursive: true });

const resultCollector = new ResultsService();
resultCollector.init();

return {
resultCollector: () => resultCollector,

copy: async function (_url, filename, index = 0) {
log.debug('[copy] %s%s', baseUrl, _url);
const out = workingPath + '/' + filename + '-' + index + '.json';
@@ -79,7 +85,7 @@ function transfers(workingPath, baseUrl, exceptions) {
// Wait for the pool to settle.
return poolPromise.then(
function () {
log.info('All promises fulfilled');
log.debug('All promises fulfilled');
},
function (error) {
log.error('Some promise rejected: ' + error.message);
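
transfers() now constructs one ResultsService per run and exposes it through the resultCollector() accessor, which is why the sync functions in feeds/kong and feeds/prometheus can call xfer.resultCollector().output() after each concurrentWork pass. A rough sketch of the call flow, inside an async function like sync() above (the producer arguments and feed path are hypothetical):

const xfer = transfers('/tmp/work', baseUrl, []);

// Producers increment the shared collector as each record completes...
await xfer.concurrentWork(
  loadProducer(xfer, destinationUrl, 'gw-services', 'name', 'service', '/feed/GatewayService')
);

// ...and the caller flushes one consolidated summary at the end.
xfer.resultCollector().output();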