Skip to content

Commit

Permalink
Wmm preprocessing (#626)
Browse files Browse the repository at this point in the history
* - Reworked memory model preprocessing to be similar to program preprocessing.
- Added new Wmm pass that merges equivalent relations

* - Fixed Property.getSMTVariable to escape variable names.
- Fixed ConstraintCopier to preserve axiom names when copying.
- Fixed MergeEquivalentRelations to properly distinguish SameScope relations with different scopes.

* - Fixed ConstraintCopier failing to copy SameScope properly.
- Fixed equality checking between SameScope definitions failing if their scopes were NULL

* - Removed MM and MV from RelationNameRepository
- Wmm.getOrCreatePredefinedRelation now creates new M*M / M*V relations when needed, relying on MergeEquivalentRelations to merge duplicates later on.
  • Loading branch information
ThomasHaas authored Feb 28, 2024
1 parent bf457e3 commit 6cc2789
Show file tree
Hide file tree
Showing 15 changed files with 358 additions and 114 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,9 @@ public BooleanFormula getSMTVariable(EncodingContext ctx) {

public BooleanFormula getSMTVariable(Axiom ax, EncodingContext ctx) {
Preconditions.checkState(this == CAT_SPEC);
return ctx.getBooleanFormulaManager()
.makeVariable("Flag " + Optional.ofNullable(ax.getName()).orElse(ax.getRelation().getNameOrTerm()));
final String varName = ctx.getFormulaManager().escape("Flag " +
Optional.ofNullable(ax.getName()).orElse(ax.getRelation().getNameOrTerm()));
return ctx.getBooleanFormulaManager().makeVariable(varName);
}

// ------------------------- Static -------------------------
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ private void run() throws InterruptedException, SolverException, InvalidConfigur

memoryModel.configureAll(config);
preprocessProgram(task, config);
preprocessMemoryModel(task);
preprocessMemoryModel(task, config);
performStaticProgramAnalyses(task, analysisContext, config);
performStaticWmmAnalyses(task, analysisContext, config);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ private void run() throws InterruptedException, SolverException, InvalidConfigur

memoryModel.configureAll(config);
preprocessProgram(task, config);
preprocessMemoryModel(task);
preprocessMemoryModel(task, config);
performStaticProgramAnalyses(task, analysisContext, config);
performStaticWmmAnalyses(task, analysisContext, config);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ private void run() throws InterruptedException, SolverException, InvalidConfigur

memoryModel.configureAll(config);
preprocessProgram(task, config);
preprocessMemoryModel(task);
preprocessMemoryModel(task, config);
performStaticProgramAnalyses(task, analysisContext, config);
performStaticWmmAnalyses(task, analysisContext, config);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import com.dat3m.dartagnan.wmm.analysis.RelationAnalysis;
import com.dat3m.dartagnan.wmm.analysis.WmmAnalysis;
import com.dat3m.dartagnan.wmm.axiom.Axiom;
import com.dat3m.dartagnan.wmm.processing.WmmProcessingManager;
import org.sosy_lab.common.configuration.Configuration;
import org.sosy_lab.common.configuration.InvalidConfigurationException;
import org.sosy_lab.java_smt.api.Model;
Expand Down Expand Up @@ -75,8 +76,9 @@ public static void preprocessProgram(VerificationTask task, Configuration config
computeSpecificationFromProgramAssertions(program);
}
}
public static void preprocessMemoryModel(VerificationTask task) {
task.getMemoryModel().simplify();
public static void preprocessMemoryModel(VerificationTask task, Configuration config) throws InvalidConfigurationException{
final Wmm memoryModel = task.getMemoryModel();
WmmProcessingManager.fromConfig(config).run(memoryModel);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -176,10 +176,10 @@ private void runInternal(SolverContext ctx, ProverEnvironment prover, Verificati

// ------------------------ Preprocessing / Analysis ------------------------

removeFlaggedAxiomsAndReduce(memoryModel);
removeFlaggedAxioms(memoryModel);
memoryModel.configureAll(config);
preprocessProgram(task, config);
preprocessMemoryModel(task);
preprocessMemoryModel(task, config);

performStaticProgramAnalyses(task, analysisContext, config);
// Copy context without WMM analyses because we want to analyse a second model later
Expand Down Expand Up @@ -470,15 +470,14 @@ private static void addBiases(Wmm memoryModel, EnumSet<Baseline> biases) {
}
}

private static void removeFlaggedAxiomsAndReduce(Wmm memoryModel) {
private static void removeFlaggedAxioms(Wmm memoryModel) {
// We remove flagged axioms.
// NOTE: Theoretically, we could cut them but in practice this causes the whole model to get eagerly encoded,
// resulting in the worst combination: eagerly encoded model relations + lazy axiom checks.
// The performance on, e.g., LKMM is horrendous!!!!
List.copyOf(memoryModel.getAxioms()).stream()
.filter(Axiom::isFlagged)
.forEach(memoryModel::removeConstraint);
memoryModel.removeUnconstrainedRelations();
}

// ================================================================================================================
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ public class TwoSolvers extends ModelChecker {
private static final Logger logger = LogManager.getLogger(TwoSolvers.class);

private final SolverContext ctx;
private final ProverEnvironment prover1,prover2;
private final ProverEnvironment prover1, prover2;
private final VerificationTask task;

private TwoSolvers(SolverContext c, ProverEnvironment p1, ProverEnvironment p2, VerificationTask t) {
Expand All @@ -44,8 +44,8 @@ private void run() throws InterruptedException, SolverException, InvalidConfigur
Configuration config = task.getConfig();

memoryModel.configureAll(config);
preprocessProgram(task, config);
preprocessMemoryModel(task);
preprocessProgram(task, config);
preprocessMemoryModel(task, config);
performStaticProgramAnalyses(task, analysisContext, config);
performStaticWmmAnalyses(task, analysisContext, config);

Expand All @@ -59,15 +59,15 @@ private void run() throws InterruptedException, SolverException, InvalidConfigur
BooleanFormula encodeProg = programEncoder.encodeFullProgram();
prover1.addConstraint(encodeProg);
prover2.addConstraint(encodeProg);

BooleanFormula encodeWmm = wmmEncoder.encodeFullMemoryModel();
prover1.addConstraint(encodeWmm);
prover1.addConstraint(encodeWmm);
prover2.addConstraint(encodeWmm);

// For validation this contains information.
// For verification graph.encode() just returns ctx.mkTrue()
BooleanFormula encodeWitness = task.getWitness().encode(context);
prover1.addConstraint(encodeWitness);
prover1.addConstraint(encodeWitness);
prover2.addConstraint(encodeWitness);

BooleanFormula encodeSymm = symmetryEncoder.encodeFullSymmetryBreaking();
Expand All @@ -77,21 +77,21 @@ private void run() throws InterruptedException, SolverException, InvalidConfigur
prover1.addConstraint(propertyEncoder.encodeProperties(task.getProperty()));

logger.info("Starting first solver.check()");
if(prover1.isUnsat()) {
prover2.addConstraint(propertyEncoder.encodeBoundEventExec());
if (prover1.isUnsat()) {
prover2.addConstraint(propertyEncoder.encodeBoundEventExec());
logger.info("Starting second solver.check()");
res = prover2.isUnsat() ? PASS : UNKNOWN;
} else {
res = FAIL;
res = FAIL;
saveFlaggedPairsOutput(memoryModel, wmmEncoder, prover1, context, task.getProgram());
}

if(logger.isDebugEnabled()) {
String smtStatistics = "\n ===== SMT Statistics ===== \n";
for(String key : prover1.getStatistics().keySet()) {
smtStatistics += String.format("\t%s -> %s\n", key, prover1.getStatistics().get(key));
}
logger.debug(smtStatistics);
if (logger.isDebugEnabled()) {
String smtStatistics = "\n ===== SMT Statistics ===== \n";
for (String key : prover1.getStatistics().keySet()) {
smtStatistics += String.format("\t%s -> %s\n", key, prover1.getStatistics().get(key));
}
logger.debug(smtStatistics);
}

// For Safety specs, we have SAT=FAIL, but for reachability specs, we have SAT=PASS
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ public class RelationNameRepository {
public static final String CTRLDIRECT = "ctrlDirect";
public static final String EMPTY = "0";
public static final String FR = "fr";
public static final String MM = "(M*M)";
public static final String MV = "(M*V)";
public static final String IDDTRANS = "idd^+";
public static final String DATA = "data";
public static final String ADDR = "addr";
Expand Down
88 changes: 12 additions & 76 deletions dartagnan/src/main/java/com/dat3m/dartagnan/wmm/Wmm.java
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
import org.sosy_lab.common.configuration.Options;

import java.util.*;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;

Expand Down Expand Up @@ -143,10 +142,10 @@ public Relation addDefinition(Definition definition) {
}

public void removeDefinition(Relation definedRelation) {
checkArgument(!(definedRelation.definition instanceof Definition.Undefined),
"Already undefined relation %s.", definedRelation);
logger.trace("Remove definition {}", definedRelation.definition);
definedRelation.definition = new Definition.Undefined(definedRelation);
if (!(definedRelation.getDefinition() instanceof Definition.Undefined)) {
logger.trace("Remove definition {}", definedRelation.getDefinition());
definedRelation.definition = new Definition.Undefined(definedRelation);
}
}

public void addFilter(Filter filter) {
Expand Down Expand Up @@ -178,72 +177,6 @@ public void configureAll(Configuration config) throws InvalidConfigurationExcept
logger.info("{}: {}", REDUCE_ACYCLICITY_ENCODE_SETS, this.config.isReduceAcyclicityEncoding());
}

public void simplify() {
simplifyAssociatives(Union.class, Union::new);
simplifyAssociatives(Intersection.class, Intersection::new);
}

private void simplifyAssociatives(Class<? extends Definition> cls, BiFunction<Relation, Relation[], Definition> constructor) {
for (Relation r : List.copyOf(relations)) {
if (!r.names.isEmpty() || !cls.isInstance(r.definition) ||
constraints.stream().filter(c -> !(c instanceof Definition))
.anyMatch(c -> c.getConstrainedRelations().contains(r))) {
continue;
}
List<Relation> parents = relations.stream().filter(x -> x.getDependencies().contains(r)).toList();
Relation p = parents.size() == 1 ? parents.get(0) : null;
if (p != null && cls.isInstance(p.definition)) {
Relation[] o = Stream.of(r, p)
.flatMap(x -> x.getDependencies().stream())
.filter(x -> !r.equals(x))
.distinct()
.toArray(Relation[]::new);
removeDefinition(p);
Relation alternative = addDefinition(constructor.apply(p, o));
if (alternative != p) {
logger.warn("relation {} becomes duplicate of {}", p, alternative);
}
removeDefinition(r);
deleteRelation(r);
}
}
}

public void removeUnconstrainedRelations() {
// A relation is considered "unconstrained" if it does not (directly or indirectly) contribute to a
// non-defining constraint. Such relations (and their defining constraints) can safely be deleted
// without changing the semantics of the memory model.
final DependencyCollector collector = new DependencyCollector();
getConstraints().stream().filter(c -> !(c instanceof Definition)).forEach(c -> c.accept(collector));
final Set<Relation> relevantRelations = new HashSet<>(collector.collectedRelations);
Wmm.ANARCHIC_CORE_RELATIONS.forEach(n -> relevantRelations.add(getRelation(n)));

for (Constraint c : List.copyOf(getConstraints())) {
if (!relevantRelations.containsAll(c.getConstrainedRelations())) {
removeConstraint(c);
}
}

for (Relation rel : Set.copyOf(getRelations())) {
if (!relevantRelations.contains(rel)) {
deleteRelation(rel);
}
}
}

private final static class DependencyCollector implements Constraint.Visitor<Void> {
private final Set<Relation> collectedRelations = new HashSet<>();
@Override
public Void visitConstraint(Constraint constraint) {
for (Relation rel : constraint.getConstrainedRelations()) {
if (collectedRelations.add(rel)) {
rel.getDefinition().accept(this);
}
}
return null;
}
}

private Relation makePredefinedRelation(String name) {
/*
WARNING: The code has possibly unexpected behaviour:
Expand Down Expand Up @@ -273,20 +206,23 @@ private Relation makePredefinedRelation(String name) {
Relation rfinv = addDefinition(new Inverse(newRelation(), getOrCreatePredefinedRelation(RF)));
yield composition(r, rfinv, getOrCreatePredefinedRelation(CO));
}
case MM -> product(r, Tag.MEMORY, Tag.MEMORY);
case MV -> product(r, Tag.MEMORY, Tag.VISIBLE);
case IDDTRANS -> new TransitiveClosure(r, getOrCreatePredefinedRelation(IDD));
case DATA -> intersection(r, getOrCreatePredefinedRelation(IDDTRANS), getOrCreatePredefinedRelation(MM));
case DATA -> intersection(r,
getOrCreatePredefinedRelation(IDDTRANS),
addDefinition(product(newRelation(), Tag.MEMORY, Tag.MEMORY))
);
case ADDR -> {
Relation addrdirect = getOrCreatePredefinedRelation(ADDRDIRECT);
Relation comp = addDefinition(composition(newRelation(), getOrCreatePredefinedRelation(IDDTRANS), addrdirect));
Relation union = addDefinition(union(newRelation(), addrdirect, comp));
yield intersection(r, union, getOrCreatePredefinedRelation(MM));
Relation mm = addDefinition(product(newRelation(), Tag.MEMORY, Tag.MEMORY));
yield intersection(r, union, mm);
}
case CTRL -> {
Relation comp = addDefinition(composition(newRelation(), getOrCreatePredefinedRelation(IDDTRANS),
getOrCreatePredefinedRelation(CTRLDIRECT)));
yield intersection(r, comp, getOrCreatePredefinedRelation(MV));
Relation mv = addDefinition(product(newRelation(), Tag.MEMORY, Tag.VISIBLE));
yield intersection(r, comp, mv);
}
case POLOC -> intersection(r, getOrCreatePredefinedRelation(PO), getOrCreatePredefinedRelation(LOC));
case RFE -> intersection(r, getOrCreatePredefinedRelation(RF), getOrCreatePredefinedRelation(EXT));
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
package com.dat3m.dartagnan.wmm.processing;

import com.dat3m.dartagnan.wmm.Constraint;
import com.dat3m.dartagnan.wmm.Definition;
import com.dat3m.dartagnan.wmm.Relation;
import com.dat3m.dartagnan.wmm.Wmm;
import com.dat3m.dartagnan.wmm.definition.Intersection;
import com.dat3m.dartagnan.wmm.definition.Union;

import java.util.List;
import java.util.function.BiFunction;
import java.util.stream.Stream;

/**
 * WMM preprocessing pass that flattens nested binary unions/intersections into n-ary ones.
 * <p>
 * Example: a relation defined as {@code (a | b) | c} via an intermediate unnamed relation
 * {@code (a | b)} is rewritten to a single n-ary {@code Union(a, b, c)}, and the
 * intermediate relation is deleted. The same is done for {@link Intersection}.
 * Only anonymous intermediates with exactly one dependent of the same definition kind
 * are merged, so named relations and relations referenced by axioms/filters are preserved.
 */
public class FlattenAssociatives implements WmmProcessor {

    // Private constructor: instances are obtained via the static factory below.
    private FlattenAssociatives() {
    }

    /** Static factory; the pass is stateless, so each call may return a fresh instance. */
    public static FlattenAssociatives newInstance() {
        return new FlattenAssociatives();
    }

    @Override
    public void run(Wmm wmm) {
        // Flatten both associative operators independently.
        flattenAssociatives(wmm, Union.class, Union::new);
        flattenAssociatives(wmm, Intersection.class, Intersection::new);
    }

    /**
     * Merges each unnamed relation defined by {@code cls} into its single dependent
     * of the same kind, replacing the dependent's definition by an n-ary one built
     * with {@code constructor}.
     *
     * @param wmm         the memory model to rewrite in place
     * @param cls         the associative definition type to flatten (Union or Intersection)
     * @param constructor builds a new n-ary definition from (defined relation, operands)
     */
    private void flattenAssociatives(Wmm wmm,
                                     Class<? extends Definition> cls,
                                     BiFunction<Relation, Relation[], Definition> constructor
    ) {
        // Snapshot the relation set: the loop below deletes relations from the model.
        // NOTE(review): deleted relations remain in this snapshot; this appears safe
        // because removing a definition leaves them without dependencies — confirm.
        final List<Relation> relations = List.copyOf(wmm.getRelations());
        // Non-defining constraints (axioms, filters, ...): relations they mention must survive.
        final List<Constraint> constraints = wmm.getConstraints().stream()
                .filter(c -> !(c instanceof Definition)).toList();

        for (Relation r : relations) {
            // Keep r if it is named, not of the targeted kind, or directly constrained
            // by a non-defining constraint (merging it away would change semantics).
            if (r.hasName() || !cls.isInstance(r.getDefinition())
                    || constraints.stream().anyMatch(c -> c.getConstrainedRelations().contains(r))) {
                continue;
            }
            // r is mergeable only if exactly one relation depends on it ...
            final List<Relation> dependents = relations.stream().filter(x -> x.getDependencies().contains(r)).toList();
            final Relation p = (dependents.size() == 1 ? dependents.get(0) : null);
            // ... and that dependent has the same associative definition kind.
            if (p != null && cls.isInstance(p.getDefinition())) {
                // New operand list: p's operands with r replaced by r's own operands
                // (deduplicated, order of first occurrence preserved by distinct()).
                final Relation[] o = Stream.of(r, p)
                        .flatMap(x -> x.getDependencies().stream())
                        .filter(x -> !r.equals(x))
                        .distinct()
                        .toArray(Relation[]::new);
                // Order matters: p's old definition must be removed before the
                // flattened n-ary definition can be attached to it.
                wmm.removeDefinition(p);
                wmm.addDefinition(constructor.apply(p, o));
                // r is now unused: undefine it, then delete it from the model.
                wmm.removeDefinition(r);
                wmm.deleteRelation(r);
            }
        }
    }

}
Loading

0 comments on commit 6cc2789

Please sign in to comment.