Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SUP-17558: Fix the abandoned binary data, caused by the unstable natu… #1648

Merged
merged 10 commits into from
Jan 28, 2025
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Core: The processing of binaries has been refactored, eliminating possible binary data leftovers after the binary content is updated or deleted. Also, two new async consistency checks have been added: one for the detection of binary data with no corresponding database records (repairable), and one for binary database records with no corresponding binary data (for automated testing purposes, not repairable).
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,13 @@ public interface BulkActionContext {
*/
EventQueueBatch batch();

/**
 * Override the installed batch with the new one, transferring all its entries.
 *
 * @param batch the replacement batch; all entries of the currently installed batch are moved into it before it is installed
 */
void setBatch(EventQueueBatch batch);

/**
* Shortcut for {@link #batch()#add(MeshEventModel)}
*
Expand All @@ -68,5 +75,4 @@ default void setRootCause(ElementType type, String uuid, EventCauseAction action
* @param action
*/
void add(Completable action);

}
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ public class BulkActionContextImpl implements BulkActionContext {

private static final Logger log = LoggerFactory.getLogger(BulkActionContextImpl.class);

private static final int DEFAULT_BATCH_SIZE = 100;
public static final int DEFAULT_BATCH_SIZE = 100;

private final AtomicLong batchCounter = new AtomicLong(1);
private final AtomicLong elementCounter = new AtomicLong(0);
Expand All @@ -50,18 +50,25 @@ public void process() {
process(false);
}

@Override
public void setBatch(EventQueueBatch batch) {
	// Move every entry queued in the currently installed batch into the
	// replacement, so no pending events are lost by the swap.
	batch.addAll(this.batch);
	// Install the replacement as the active batch.
	this.batch = batch;
}

@Override
public void process(boolean force) {
if (elementCounter.incrementAndGet() >= DEFAULT_BATCH_SIZE || force) {
log.info("Processing transaction batch {" + batchCounter.get() + "}. I counted {" + elementCounter.get() + "} elements.");
log.info("Processing transaction batch {" + batchCounter.get() + "}. I counted {" + elementCounter.get() + "} elements. {" + asyncActions.size() + "} to be executed.");
// Check before commit to ensure we are 100% safe
db.blockingTopologyLockCheck();
Tx.get().commit();
Tx.maybeGet().ifPresent(Tx::commit);
plyhun marked this conversation as resolved.
Show resolved Hide resolved
Completable.merge(asyncActions).subscribe(() -> {
log.trace("Async action processed");
});
batch().dispatch();
// Reset the counter back to zero
// Reset the context
asyncActions.clear();
elementCounter.set(0);
batchCounter.incrementAndGet();
}
Expand All @@ -76,5 +83,4 @@ public EventQueueBatch batch() {
public void add(Completable action) {
asyncActions.add(action);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,9 @@ public Single<CacheFileInfo> getCacheFilePath(HibBinary binary, ImageManipulatio

String sha512 = binary.getSHA512Sum();
return getCacheFilePathNew(binary, parameters).onErrorResumeNext(e -> {
log.debug("New Image Cache miss", e);
if (log.isDebugEnabled()) {
log.debug("New Image Cache miss", e);
}
return getCacheFilePathOld(
sha512,
parameters,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import com.gentics.mesh.MeshVersion;
import com.gentics.mesh.cache.CacheRegistry;
import com.gentics.mesh.changelog.highlevel.HighLevelChangelogSystem;
import com.gentics.mesh.context.impl.DummyBulkActionContext;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.core.data.HibLanguage;
import com.gentics.mesh.core.data.HibMeshVersion;
import com.gentics.mesh.core.data.dao.GroupDao;
Expand All @@ -45,6 +45,7 @@
import com.gentics.mesh.core.data.search.IndexHandler;
import com.gentics.mesh.core.data.service.ServerSchemaStorageImpl;
import com.gentics.mesh.core.data.user.HibUser;
import com.gentics.mesh.core.db.CommonTx;
import com.gentics.mesh.core.db.Database;
import com.gentics.mesh.core.db.Tx;
import com.gentics.mesh.core.endpoint.admin.LocalConfigApi;
Expand Down Expand Up @@ -644,14 +645,16 @@ private void handleLocalData(PostProcessFlags flags, MeshOptions configuration,
private void checkImageCacheMigrated() throws IOException {
Path imageCachePath = Path.of(options.getImageOptions().getImageCacheDirectory());
if (Files.exists(imageCachePath) && Files.list(imageCachePath).filter(path -> path.getFileName().toString().length() == 8).count() > 0) {
db().tx(tx -> {
db().singleTx(tx -> {
log.info("Image cache requires migration, triggering the corresponding Job.");
BulkActionContext bac = tx.<CommonTx>unwrap().data().getOrCreateBulkActionContext();
tx.jobDao().findAll().stream().filter(job -> job.getType() == JobType.imagecache).forEach(job -> {
tx.jobDao().delete(job, new DummyBulkActionContext());
tx.jobDao().delete(job);
});
tx.jobDao().enqueueImageCacheMigration(tx.userDao().findByUsername("admin"));
MeshEvent.triggerJobWorker(mesh);
});
return bac;
}).doOnSuccess(bac -> bac.process(true)).subscribe();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ public boolean update(Tx tx, HibBranch branch, InternalActionContext ac, EventQu
}

@Override
public void delete(Tx tx, HibBranch element, BulkActionContext bac) {
public void delete(Tx tx, HibBranch element) {
plyhun marked this conversation as resolved.
Show resolved Hide resolved
throw new RuntimeException("Branches are currently not deletable");
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.GroupDAOActions;
Expand Down Expand Up @@ -67,8 +66,8 @@ public HibGroup create(Tx tx, InternalActionContext ac, EventQueueBatch batch, S
}

@Override
public void delete(Tx tx, HibGroup group, BulkActionContext bac) {
tx.groupDao().delete(group, bac);
public void delete(Tx tx, HibGroup group) {
tx.groupDao().delete(group);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.JobDAOActions;
Expand Down Expand Up @@ -70,8 +69,8 @@ public boolean update(Tx tx, HibJob job, InternalActionContext ac, EventQueueBat
}

@Override
public void delete(Tx tx, HibJob job, BulkActionContext bac) {
tx.jobDao().delete(job, bac);
public void delete(Tx tx, HibJob job) {
tx.jobDao().delete(job);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.LanguageDAOActions;
Expand Down Expand Up @@ -64,9 +63,9 @@ public boolean update(Tx tx, HibLanguage element, InternalActionContext ac, Even
}

@Override
public void delete(Tx tx, HibLanguage element, BulkActionContext bac) {
public void delete(Tx tx, HibLanguage element) {
// Unassign languages should cause a batch process that removes the FieldContainers for the given language.
tx.languageDao().delete(element, bac);
tx.languageDao().delete(element);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.MicroschemaDAOActions;
Expand Down Expand Up @@ -75,8 +74,8 @@ public boolean update(Tx tx, HibMicroschema element, InternalActionContext ac, E
}

@Override
public void delete(Tx tx, HibMicroschema element, BulkActionContext bac) {
tx.microschemaDao().delete(element, bac);
public void delete(Tx tx, HibMicroschema element) {
tx.microschemaDao().delete(element);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.NodeDAOActions;
Expand Down Expand Up @@ -83,9 +82,9 @@ public HibNode create(Tx tx, InternalActionContext ac, EventQueueBatch batch, St
}

@Override
public void delete(Tx tx, HibNode node, BulkActionContext bac) {
public void delete(Tx tx, HibNode node) {
NodeDao nodeDao = tx.nodeDao();
nodeDao.delete(node, bac, false, true);
nodeDao.delete(node, false, true);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.ProjectDAOActions;
Expand Down Expand Up @@ -70,9 +69,9 @@ public boolean update(Tx tx, HibProject element, InternalActionContext ac, Event
}

@Override
public void delete(Tx tx, HibProject project, BulkActionContext bac) {
public void delete(Tx tx, HibProject project) {
ProjectDao projectDao = tx.projectDao();
projectDao.delete(project, bac);
projectDao.delete(project);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.RoleDAOActions;
Expand Down Expand Up @@ -72,8 +71,8 @@ public boolean update(Tx tx, HibRole role, InternalActionContext ac, EventQueueB
}

@Override
public void delete(Tx tx, HibRole role, BulkActionContext bac) {
tx.roleDao().delete(role, bac);
public void delete(Tx tx, HibRole role) {
tx.roleDao().delete(role);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.SchemaDAOActions;
Expand Down Expand Up @@ -92,8 +91,8 @@ public HibSchema create(Tx tx, InternalActionContext ac, EventQueueBatch batch,
}

@Override
public void delete(Tx tx, HibSchema schema, BulkActionContext bac) {
tx.schemaDao().delete(schema, bac);
public void delete(Tx tx, HibSchema schema) {
tx.schemaDao().delete(schema);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.TagDAOActions;
Expand Down Expand Up @@ -99,9 +98,9 @@ public HibTag create(Tx tx, InternalActionContext ac, EventQueueBatch batch, Str
}

@Override
public void delete(Tx tx, HibTag tag, BulkActionContext bac) {
public void delete(Tx tx, HibTag tag) {
TagDao tagDao = tx.tagDao();
tagDao.delete(tag, bac);
tagDao.delete(tag);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.TagFamilyDAOActions;
Expand Down Expand Up @@ -83,9 +82,9 @@ public HibTagFamily create(Tx tx, InternalActionContext ac, EventQueueBatch batc
}

@Override
public void delete(Tx tx, HibTagFamily tagFamily, BulkActionContext bac) {
public void delete(Tx tx, HibTagFamily tagFamily) {
TagFamilyDao tagFamilyDao = tx.tagFamilyDao();
tagFamilyDao.delete(tagFamily, bac);
tagFamilyDao.delete(tagFamily);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import javax.inject.Singleton;

import com.gentics.graphqlfilter.filter.operation.FilterOperation;
import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.core.action.DAOActionContext;
import com.gentics.mesh.core.action.UserDAOActions;
Expand Down Expand Up @@ -70,8 +69,8 @@ public HibUser create(Tx tx, InternalActionContext ac, EventQueueBatch batch, St
}

@Override
public void delete(Tx tx, HibUser user, BulkActionContext bac) {
tx.userDao().delete(user, bac);
public void delete(Tx tx, HibUser user) {
tx.userDao().delete(user);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@

import com.gentics.mesh.cli.BootstrapInitializer;
import com.gentics.mesh.context.InternalActionContext;
import com.gentics.mesh.context.impl.DummyBulkActionContext;
import com.gentics.mesh.core.action.JobDAOActions;
import com.gentics.mesh.core.data.branch.HibBranch;
import com.gentics.mesh.core.data.dao.JobDao;
Expand Down Expand Up @@ -177,7 +176,7 @@ public void handleDelete(InternalActionContext ac, String uuid) {
HibJob job = root.loadObjectByUuidNoPerm(uuid, true);
db.tx(() -> {
if (job.hasFailed()) {
root.delete(job, new DummyBulkActionContext());
root.delete(job);
} else {
throw error(BAD_REQUEST, "job_error_invalid_state", uuid);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
package com.gentics.mesh.core.endpoint.admin.consistency;

import java.io.File;

import com.gentics.mesh.core.data.storage.BinaryStorage;
import com.gentics.mesh.core.db.Database;
import com.gentics.mesh.core.db.Transactional;
import com.gentics.mesh.core.db.Tx;
import com.gentics.mesh.core.rest.admin.consistency.InconsistencySeverity;

import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;

/**
 * Async-only consistency check that reports binary database records whose
 * backing binary data file is missing from the binary storage.
 * Found inconsistencies are reported as {@link InconsistencySeverity#CRITICAL}
 * and are not repairable.
 */
public class BinariesConsistencyCheck implements ConsistencyCheck {

	private static final Logger log = LoggerFactory.getLogger(BinariesConsistencyCheck.class);

	@Override
	public ConsistencyCheckResult invoke(Database db, Tx tx1, boolean attemptRepair) {
		Transactional<ConsistencyCheckResult> check = db.transactional(innerTx -> {
			log.info("Binaries check started");
			BinaryStorage storage = innerTx.data().binaryStorage();
			ConsistencyCheckResult outcome = new ConsistencyCheckResult();
			// Flag every binary record whose data file is absent on disk.
			innerTx.binaries().findAll().runInExistingTx(innerTx)
				.filter(binary -> !new File(storage.getFilePath(binary.getUuid())).exists())
				.forEach(binary -> outcome.addInconsistency("No binary data found for the binary", binary.getUuid(), InconsistencySeverity.CRITICAL));
			log.info("Binaries check finished.");
			return outcome;
		});
		// Run the transactional action within the transaction handed to us.
		return check.runInExistingTx(tx1);
	}

	@Override
	public String getName() {
		// The key "binaries" is already taken by BinaryCheck, hence the different name.
		return "binarydata";
	}

	@Override
	public boolean asyncOnly() {
		return true;
	}
}
Loading