Merge branch 'release.24.12' into next
ar2rsawseen committed Jan 8, 2025
2 parents cf036d1 + 52a5c4f commit 2e56c57
Showing 9 changed files with 145 additions and 60 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,9 @@ Fixes:
Features:
- [user-management] Global admins can now disable 2FA for individual users

Fixes:
- [gridfs] fixes for moving to Promises

Dependencies:
- Bump express from 4.21.1 to 4.21.2
- Bump mocha from 10.2.0 to 10.8.2
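
The "[gridfs] fixes for moving to Promises" entry maps to the api/utils/countlyFs.js changes later in this commit: GridFSBucket calls such as delete, rename and drop are now awaited (newer MongoDB Node.js drivers return Promises from these methods) while the module keeps exposing its existing callback interface. A minimal sketch of the bridging pattern used there, with an illustrative function name that is not part of the commit:

async function deleteWithCallback(bucket, id, callback) {
    // bucket is assumed to be a GridFSBucket instance; callback stays Node-style
    let errHandle = null;
    try {
        await bucket.delete(id);
    }
    catch (error) {
        errHandle = error;
    }
    if (callback) {
        // callers still receive a single error-or-null argument
        callback(errHandle);
    }
}
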
10 changes: 5 additions & 5 deletions Dockerfile-api
@@ -1,4 +1,4 @@
FROM node:hydrogen-bullseye-slim
FROM node:iron-bookworm-slim

ARG COUNTLY_PLUGINS=mobile,web,desktop,plugins,density,locale,browser,sources,views,logger,systemlogs,populator,reports,crashes,push,star-rating,slipping-away-users,compare,server-stats,dbviewer,times-of-day,compliance-hub,alerts,onboarding,consolidate,remote-config,hooks,dashboards,sdk,data-manager,guides
# Countly Enterprise:
@@ -25,15 +25,14 @@ COPY . .

# install required dependencies which slim image doesn't have
RUN apt-get update && \
apt-get install -y iputils-ping procps net-tools telnet apt-transport-https curl wget git python2 make gcc g++ unzip && \
ln -s /usr/bin/python2.7 /usr/bin/python
apt-get install -y iputils-ping procps net-tools telnet apt-transport-https curl wget git make gcc g++ unzip xz-utils

RUN apt-get update && \
apt-get upgrade -y && \
cd /usr/src && \
wget https://www.python.org/ftp/python/3.8.12/Python-3.8.12.tar.xz && \
tar -xf Python-3.8.12.tar.xz && \
apt-get install -y build-essential sudo zlib1g-dev libssl1.1 libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev libffi-dev curl libbz2-dev && \
apt-get install -y build-essential sudo zlib1g-dev libssl3 libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev libffi-dev curl libbz2-dev && \
cd Python-3.8.12 && \
./configure --enable-optimizations --enable-shared && \
make && \
@@ -51,14 +50,15 @@ RUN curl -s -L -o /tmp/tini.deb "https://github.com/krallin/tini/releases/downlo
# preinstall
cp -n ./api/config.sample.js ./api/config.js && \
cp -n ./frontend/express/config.sample.js ./frontend/express/config.js && \
HOME=/tmp npm install -g npm@latest && \
HOME=/tmp npm install --unsafe-perm=true --allow-root && \
HOME=/tmp npm install argon2 --build-from-source --unsafe-perm=true --allow-root && \
./bin/docker/preinstall.sh && \
bash /opt/countly/bin/scripts/detect.init.sh && \
\
# cleanup & chown
npm remove -y --no-save mocha nyc should supertest && \
apt-get remove -y git gcc g++ make automake autoconf libtool pkg-config unzip sqlite3 && \
apt-get remove -y git gcc g++ make automake autoconf libtool pkg-config unzip sqlite3 wget && \
apt-get install -y libgbm-dev libgbm1 gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \
rm -rf test /tmp/* /tmp/.??* /var/tmp/* /var/tmp/.??* /var/log/* /root/.npm && \
3 changes: 2 additions & 1 deletion Dockerfile-centos-api
@@ -34,7 +34,7 @@ RUN yum update -y
RUN curl -s -L -o /tmp/tini.rpm "https://github.com/krallin/tini/releases/download/v${TINI_VERSION}/tini_${TINI_VERSION}.rpm" && \
rpm -i /tmp/tini.rpm && \
\
curl -sL https://rpm.nodesource.com/setup_18.x | bash - && \
curl -sL https://rpm.nodesource.com/setup_20.x | bash - && \
yum install -y nodejs python3.8 python2 python38-libs python38-devel python38-pip nss libdrm libgbm cyrus-sasl* && \
ln -s /usr/bin/node /usr/bin/nodejs && \
unlink /usr/bin/python3 && \
@@ -53,6 +53,7 @@ RUN curl -s -L -o /tmp/tini.rpm "https://github.com/krallin/tini/releases/downlo
# preinstall
cp -n ./api/config.sample.js ./api/config.js && \
cp -n ./frontend/express/config.sample.js ./frontend/express/config.js && \
HOME=/tmp npm install -g npm@latest && \
HOME=/tmp npm install --unsafe-perm=true --allow-root && \
HOME=/tmp npm install argon2 --build-from-source --unsafe-perm=true --allow-root && \
./bin/docker/preinstall.sh && \
3 changes: 2 additions & 1 deletion Dockerfile-centos-frontend
@@ -32,7 +32,7 @@ RUN yum update -y
RUN curl -s -L -o /tmp/tini.rpm "https://github.com/krallin/tini/releases/download/v${TINI_VERSION}/tini_${TINI_VERSION}.rpm" && \
rpm -i /tmp/tini.rpm && \
\
curl -sL https://rpm.nodesource.com/setup_18.x | bash - && \
curl -sL https://rpm.nodesource.com/setup_20.x | bash - && \
yum install -y nodejs python3.8 python2 python38-libs python38-devel python38-pip nss libdrm libgbm cyrus-sasl* && \
ln -s /usr/bin/node /usr/bin/nodejs && \
unlink /usr/bin/python3 && \
@@ -52,6 +52,7 @@ RUN curl -s -L -o /tmp/tini.rpm "https://github.com/krallin/tini/releases/downlo
cp -n ./frontend/express/public/javascripts/countly/countly.config.sample.js ./frontend/express/public/javascripts/countly/countly.config.js && \
cp -n ./frontend/express/config.sample.js ./frontend/express/config.js && \
cp -n ./api/config.sample.js ./api/config.js && \
HOME=/tmp npm install -g npm@latest && \
HOME=/tmp npm install --unsafe-perm=true --allow-root && \
HOME=/tmp npm install argon2 --build-from-source --unsafe-perm=true --allow-root && \
./bin/docker/preinstall.sh && \
2 changes: 1 addition & 1 deletion Dockerfile-core
@@ -40,7 +40,7 @@ RUN useradd -r -M -U -d /opt/countly -s /bin/false countly && \
gcc g++ make binutils autoconf automake autotools-dev libtool pkg-config zlib1g-dev libcunit1-dev libssl-dev libxml2-dev libev-dev \
libevent-dev libjansson-dev libjemalloc-dev cython python3-dev python-setuptools && \
# node
wget -qO- https://deb.nodesource.com/setup_18.x | bash - && \
wget -qO- https://deb.nodesource.com/setup_20.x | bash - && \
# data_migration (mongo clients)
wget -qO - https://www.mongodb.org/static/pgp/server-6.0.asc | sudo apt-key add - && \
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/6.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-6.0.list && \
10 changes: 5 additions & 5 deletions Dockerfile-frontend
@@ -1,4 +1,4 @@
FROM node:hydrogen-bullseye-slim
FROM node:iron-bookworm-slim

ARG COUNTLY_PLUGINS=mobile,web,desktop,plugins,density,locale,browser,sources,views,logger,systemlogs,populator,reports,crashes,push,star-rating,slipping-away-users,compare,server-stats,dbviewer,times-of-day,compliance-hub,alerts,onboarding,consolidate,remote-config,hooks,dashboards,sdk,data-manager,guides
# Countly Enterprise:
@@ -21,15 +21,14 @@ WORKDIR /opt/countly
COPY . .
# install required dependencies which slim image doesn't have
RUN apt-get update && \
apt-get install -y iputils-ping net-tools telnet apt-transport-https procps curl wget git python2 make gcc g++ unzip && \
ln -s /usr/bin/python2.7 /usr/bin/python
apt-get install -y iputils-ping net-tools telnet apt-transport-https procps curl wget git make gcc g++ unzip xz-utils

RUN apt-get update && \
apt-get upgrade -y && \
cd /usr/src && \
wget https://www.python.org/ftp/python/3.8.12/Python-3.8.12.tar.xz && \
tar -xf Python-3.8.12.tar.xz && \
apt-get install -y build-essential sudo zlib1g-dev libssl1.1 libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev libffi-dev curl libbz2-dev && \
apt-get install -y build-essential sudo zlib1g-dev libssl3 libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev libreadline-dev libffi-dev curl libbz2-dev && \
cd Python-3.8.12 && \
./configure --enable-optimizations --enable-shared && \
make && \
@@ -48,6 +47,7 @@ RUN curl -s -L -o /tmp/tini.deb "https://github.com/krallin/tini/releases/downlo
cp -n ./api/config.sample.js ./api/config.js && \
cp -n ./frontend/express/config.sample.js ./frontend/express/config.js && \
cp -n ./frontend/express/public/javascripts/countly/countly.config.sample.js ./frontend/express/public/javascripts/countly/countly.config.js && \
HOME=/tmp npm install -g npm@latest && \
HOME=/tmp npm install --unsafe-perm=true --allow-root && \
HOME=/tmp npm install argon2 --build-from-source --unsafe-perm=true --allow-root && \
./bin/docker/preinstall.sh && \
@@ -56,7 +56,7 @@ RUN curl -s -L -o /tmp/tini.deb "https://github.com/krallin/tini/releases/downlo
\
# cleanup & chown
npm remove -y --no-save mocha nyc should supertest puppeteer && \
apt-get remove -y git gcc g++ make automake autoconf libtool pkg-config unzip sqlite3 && \
apt-get remove -y git gcc g++ make automake autoconf libtool pkg-config unzip sqlite3 wget && \
apt-get autoremove -y && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \
rm -rf test /tmp/* /tmp/.??* /var/tmp/* /var/tmp/.??* /var/log/* /root/.npm && \
113 changes: 71 additions & 42 deletions api/utils/countlyFs.js
@@ -68,7 +68,7 @@ countlyFs.gridfs = {};
**/
function beforeSave(category, filename, options, callback, done) {
log.d("checking file", filename);
ob.getId(category, filename, function(err, res) {
ob.getId(category, filename, async function(err, res) {
log.d("file state", filename, err, res);
if (options.forceClean) {
ob.clearFile(category, filename, done);
@@ -80,15 +80,20 @@ countlyFs.gridfs = {};
else if (options.writeMode === "overwrite") {
var bucket = new GridFSBucket(db, { bucketName: category });
log.d("deleting file", filename);
bucket.delete(res, function(error) {
log.d("deleted", filename, error);
if (!error) {
setTimeout(done, 1);
}
else if (callback) {
callback(error);
}
});
let errHandle = null;
try {
await bucket.delete(res);
}
catch (error) {
errHandle = error;
}
log.d("deleted", filename, errHandle);
if (!errHandle) {
setTimeout(done, 1);
}
else if (callback) {
callback(errHandle);
}
}
else {
if (callback) {
@@ -116,6 +121,7 @@ countlyFs.gridfs = {};
* });
*/
ob.getId = function(category, filename, callback) {
log.d("getId", category, filename);
db.collection(category + ".files").findOne({ filename: filename }, {_id: 1}, function(err, res) {
if (callback) {
callback(err, (res && res._id) ? res._id : false);
@@ -144,6 +150,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}
log.d("exists", category, dest, options);
var query = {};
if (options.id) {
query._id = options.id;
@@ -184,7 +191,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("saveFile", category, dest, source, options);
var filename = dest.split(path.sep).pop();
beforeSave(category, filename, options, callback, function() {
save(category, filename, fs.createReadStream(source), options, callback);
@@ -218,6 +225,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}
log.d("saveData", category, dest, typeof data, options);
beforeSave(category, filename, options, callback, function() {
var readStream = new Readable;
readStream.push(data);
@@ -253,6 +261,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}
log.d("saveStream", category, dest, typeof readStream, options);
beforeSave(category, filename, options, callback, function() {
save(category, filename, readStream, options, callback);
});
@@ -271,7 +280,7 @@ countlyFs.gridfs = {};
* console.log("Finished", err);
* });
*/
ob.rename = function(category, dest, source, options, callback) {
ob.rename = async function(category, dest, source, options, callback) {
var newname = dest.split(path.sep).pop();
var oldname = source.split(path.sep).pop();
if (typeof options === "function") {
@@ -281,25 +290,35 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("rename", category, dest, source, options);
if (options.id) {
let bucket = new GridFSBucket(db, { bucketName: category });
bucket.rename(options.id, newname, function(error) {
if (callback) {
callback(error);
}
});
let errHandle = null;
try {
await bucket.rename(options.id, newname);
}
catch (error) {
errHandle = error;
}
if (callback) {
callback(errHandle);
}
}
else {
db.collection(category + ".files").findOne({ filename: oldname }, {_id: 1}, function(err, res) {
db.collection(category + ".files").findOne({ filename: oldname }, {_id: 1}, async function(err, res) {
if (!err) {
if (res && res._id) {
let bucket = new GridFSBucket(db, { bucketName: category });
bucket.rename(res._id, newname, function(error) {
if (callback) {
callback(error);
}
});
let errHandle = null;
try {
await bucket.rename(res._id, newname);
}
catch (error) {
errHandle = error;
}
if (callback) {
callback(errHandle);
}
}
else {
if (callback) {
@@ -391,7 +410,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("deleteFile", category, dest, options);
if (options.id) {
ob.deleteFileById(category, options.id, callback);
}
@@ -426,13 +445,19 @@ countlyFs.gridfs = {};
* console.log("Finished", err);
* });
*/
ob.deleteAll = function(category, dest, callback) {
ob.deleteAll = async function(category, dest, callback) {
log.d("deleteAll", category, dest);
var bucket = new GridFSBucket(db, { bucketName: category });
bucket.drop(function(error) {
if (callback) {
callback(error);
}
});
let errHandle = null;
try {
await bucket.drop();
}
catch (error) {
errHandle = error;
}
if (callback) {
callback(errHandle);
}
};

/**
@@ -457,7 +482,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("getStream", category, dest, options);
if (callback) {
if (options.id) {
ob.getStreamById(category, options.id, callback);
@@ -490,7 +515,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("getData", category, dest, options);
if (options.id) {
ob.getDataById(category, options.id, callback);
}
@@ -536,7 +561,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("getSize", category, dest, options);
var query = {};
if (options.id) {
query._id = options.id;
@@ -571,7 +596,7 @@ countlyFs.gridfs = {};
if (!options) {
options = {};
}

log.d("getStats", category, dest, options);
var query = {};
if (options.id) {
query._id = options.id;
@@ -608,6 +633,7 @@ countlyFs.gridfs = {};
* });
*/
ob.getDataById = function(category, id, callback) {
log.d("getDataById", category, id);
var bucket = new GridFSBucket(db, { bucketName: category });
var downloadStream = bucket.openDownloadStream(id);
downloadStream.on('error', function(error) {
@@ -639,6 +665,7 @@ countlyFs.gridfs = {};
* });
*/
ob.getStreamById = function(category, id, callback) {
log.d("getStreamById", category, id);
if (callback) {
var bucket = new GridFSBucket(db, { bucketName: category });
callback(null, bucket.openDownloadStream(id));
@@ -656,17 +683,17 @@ countlyFs.gridfs = {};
* });
*/
ob.deleteFileById = async function(category, id, callback) {
log.d("deleteFileById", category, id);
var bucket = new GridFSBucket(db, { bucketName: category });
let errHandle = null;
try {
await bucket.delete(id);
if (callback) {
callback(null);
}
}
catch (ee) {
if (callback) {
callback(ee);
}
catch (error) {
errHandle = error;
}
if (callback) {
callback(errHandle);
}
};
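
The callback contract of deleteFileById is unchanged by this conversion: the error captured from the awaited bucket.delete() (or null on success) is simply forwarded. A short usage sketch, assuming countlyFs is the loaded api/utils/countlyFs.js module with its database handle already initialised inside the Countly API process; the category name and id value are placeholders:

const { ObjectId } = require('mongodb');

// "feedback" and the id below are illustrative placeholders
countlyFs.gridfs.deleteFileById("feedback", new ObjectId("507f1f77bcf86cd799439011"), function(err) {
    // err is null on success, otherwise the error thrown by the awaited bucket.delete()
    if (err) {
        console.log("Could not delete file", err);
    }
    else {
        console.log("File deleted");
    }
});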

@@ -681,6 +708,7 @@ countlyFs.gridfs = {};
* });
*/
ob.clearFile = function(category, filename, callback) {
log.d("clearFile", category, filename);
db.collection(category + ".files").deleteMany({ filename: filename }, function(err1, res1) {
log.d("deleting files", category, { filename: filename }, err1, res1 && res1.result);
db.collection(category + ".chunks").deleteMany({ files_id: filename }, function(err2, res2) {
@@ -697,6 +725,7 @@ countlyFs.gridfs = {};
* @param {function} callback - function called when files found or query errored, providing error object as first param and a list of filename, creation date and size as second
*/
ob.listFiles = function(category, callback) {
log.d("listFiles", category);
const bucket = new GridFSBucket(db, { bucketName: category });
bucket.find().toArray()
.then((records) => callback(