pluginDescriptors,
+ boolean includeHTMLTags)
+ {
+ StringBuffer buffer = new StringBuffer();
+
+ if (includeHTMLTags) {
+ buffer.append("");
+ }
+ buffer.append("ProM Package Overview
");
+
+ PMController packageController = new PMController(Boot.Level.NONE);
+ List extends PMPackage> uptodatePackages = packageController.getToUninstallPackages();
+ List extends PMPackage> outofdatePackages = packageController.getToUpdatePackages();
+ buffer.append("Installed packages
");
+ packageListAsTable(buffer, uptodatePackages);
+ buffer.append("Updates available
");
+ packageListAsTable(buffer, outofdatePackages);
+
+ buffer.append("Available plug-ins
");
+ buffer.append("");
+ buffer.append("Plug-in name | UITopia | UITopia name | Package name | Author name | Description | Help |
");
+ for (PluginDescriptor pluginDescriptor : pluginDescriptors) {
+ String uiName = null;
+ boolean isUITopia = false;
+ UITopiaVariant variant = pluginDescriptor.getAnnotation(UITopiaVariant.class);
+ if (variant != null) {
+ uiName = variant.uiLabel();
+ isUITopia = true;
+ variantAsRow(buffer, pluginDescriptor, uiName, variant);
+ }
+ Visualizer visualizer = pluginDescriptor.getAnnotation(Visualizer.class);
+ if (visualizer != null) {
+ uiName = visualizer.name();
+ isUITopia = true;
+ visualizerAsRow(buffer, pluginDescriptor, uiName);
+ }
+ UIImportPlugin importPlugin = pluginDescriptor.getAnnotation(UIImportPlugin.class);
+ if (importPlugin != null) {
+ uiName = pluginDescriptor.getName();
+ isUITopia = true;
+ importPluginAsRow(buffer, pluginDescriptor, uiName);
+ }
+ UIExportPlugin exportPlugin = pluginDescriptor.getAnnotation(UIExportPlugin.class);
+ if (exportPlugin != null) {
+ uiName = pluginDescriptor.getName();
+ isUITopia = true;
+ uiExportPluginAsRow(buffer, pluginDescriptor, uiName);
+ }
+ for (int i = 0; i < pluginDescriptor.getNumberOfMethods(); i++) {
+ variant = pluginDescriptor.getAnnotation(UITopiaVariant.class, i);
+ if (variant != null) {
+ uiName = variant.uiLabel();
+ isUITopia = true;
+ variantAsRow(buffer, pluginDescriptor, uiName, variant);
+ }
+ }
+ if (!isUITopia) {
+ nonUITopiaRow(buffer, pluginDescriptor, uiName);
+ }
+ }
+ buffer.append("
");
+ if (includeHTMLTags) {
+ buffer.append("");
+ }
+ return buffer.toString();
+
+ }
+
+
+ private static void nonUITopiaRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) {
+ buffer.append("");
+ buffer.append("" + pluginDescriptor.getName() + " | ");
+ buffer.append(" | ");
+ buffer.append("" + (uiName == null ? "" : uiName) + " | ");
+ packageCells(buffer, pluginDescriptor);
+ }
+
+
+ private static void packageCells(StringBuffer buffer, PluginDescriptor pluginDescriptor) {
+ String packName = null;
+ String authorName = null;
+ String description = null;
+ PackageDescriptor packageDescriptor = pluginDescriptor.getPackage();
+ if (packageDescriptor != null) {
+ packName = packageDescriptor.getName();
+ authorName = packageDescriptor.getAuthor();
+ description = packageDescriptor.getDescription();
+ }
+ buffer.append("" + (packName == null ? "" : packName) + " | ");
+ buffer.append("" + (authorName == null ? "" : authorName) + " | ");
+ cell( (description == null ? "" : description), buffer );
+ buffer.append("
");
+ }
+
+
+ private static void uiExportPluginAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) {
+ buffer.append("");
+ buffer.append("" + pluginDescriptor.getName() + " | ");
+ buffer.append("Export | ");
+ buffer.append("" + (uiName == null ? "" : uiName) + " | ");
+ packageCells(buffer, pluginDescriptor);
+ }
+
+
+ private static void importPluginAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) {
+ buffer.append("
");
+ buffer.append("" + pluginDescriptor.getName() + " | ");
+ buffer.append("Import | ");
+ buffer.append("" + (uiName == null ? "" : uiName) + " | ");
+ packageCells(buffer, pluginDescriptor);
+ }
+
+
+ private static void visualizerAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) {
+ buffer.append("
");
+ buffer.append("" + pluginDescriptor.getName() + " | ");
+ buffer.append("Visualizer | ");
+ buffer.append("" + (uiName == null ? "" : uiName) + " | ");
+ packageCells(buffer, pluginDescriptor);
+ }
+
+
+ private static void variantAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName,
+ UITopiaVariant variant) {
+ buffer.append("
");
+ buffer.append("" + pluginDescriptor.getName() + " | ");
+ buffer.append("Plug-in variant | ");
+ buffer.append("" + (uiName == null ? "" : uiName) + " | ");
+ String packName = null;
+ PackageDescriptor packageDescriptor = pluginDescriptor.getPackage();
+ if (packageDescriptor != null) {
+ packName = packageDescriptor.getName();
+ }
+ buffer.append("" + (packName == null ? "" : packName) + " | ");
+ buffer.append("" + variant.author() + " | ");
+ cell( variant.uiHelp() , buffer);
+ buffer.append("
");
+ }
+
+ private static void packageListAsTable(StringBuffer buffer, List extends PMPackage> uptodatePackages) {
+ buffer.append("");
+ buffer.append("Package | Dependency | Version | Author | Description |
");
+ for (PMPackage pack : uptodatePackages) {
+ buffer.append("");
+ buffer.append("" + pack.getPackageName() + " | ");
+ buffer.append(" | ");
+ buffer.append("" + pack.getVersion() + " | ");
+ buffer.append("" + pack.getAuthorName() + " | ");
+ cell(pack.getDescription(),buffer);
+ buffer.append("
");
+ for (String s : pack.getDependencies()) {
+ buffer.append(" | " + s + " | | |
");
+ }
+ }
+ buffer.append("
");
+ }
+
+ private static void cell(String contents, StringBuffer buffer) {
+ buffer.append("" + contents + " | ");
+ }
+
+ public static void standardListing(String fileName) throws IOException{
+ FileWriter writer = new FileWriter(fileName);
+ UIPluginContext uipc =
+ new HeadlessUIPluginContext(new ConsoleUIPluginContext(), "show_package_exporter");
+ HTMLToString output = ShowPackageOverviewPlugin.info(uipc);
+ writer.write(output.toHTMLString(true));
+ writer.close();
+ }
+
+ public static void extendedListing(String fileName) throws IOException{
+ FileWriter writer = new FileWriter(fileName);
+ UIPluginContext uipc =
+ new HeadlessUIPluginContext(new ConsoleUIPluginContext(), "show_package_exporter");
+ System.out.println("extendedListing()");
+ PackageManager manager = PackageManager.getInstance();
+ manager.initialize(Boot.Level.ALL);
+ Set packages = manager.getAvailablePackages();
+ System.out.println("Total packages:" + packages.size());
+ SortedSet allPlugins = uipc.getPluginManager().getAllPlugins();
+ System.out.println("Total plugins:" + allPlugins.size());
+ // TODO: at time of writing, no plugins are initialized, so this list is empty
+ // which takes away the most useful aspect of this little tool.
+ writer.write(toExtendedHTMLString(allPlugins, true));
+ writer.close();
+ }
+
+ public static void main(String[] args) throws IOException{
+ // standardListing("prompackages.html");
+ extendedListing("prompackagesextended.html");
+ }
+}
diff --git a/src/au/edu/qut/prom/helpers/PetriNetFragmentParser.java b/src/au/edu/qut/prom/helpers/PetriNetFragmentParser.java
new file mode 100644
index 0000000..1907051
--- /dev/null
+++ b/src/au/edu/qut/prom/helpers/PetriNetFragmentParser.java
@@ -0,0 +1,372 @@
+package au.edu.qut.prom.helpers;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.processmining.acceptingpetrinet.models.AcceptingPetriNet;
+import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetImpl;
+import org.processmining.models.graphbased.directed.petrinet.PetrinetNode;
+import org.processmining.models.graphbased.directed.petrinet.StochasticNet;
+import org.processmining.models.graphbased.directed.petrinet.elements.Place;
+import org.processmining.models.graphbased.directed.petrinet.elements.Transition;
+import org.processmining.models.graphbased.directed.petrinet.impl.StochasticNetImpl;
+import org.processmining.models.semantics.petrinet.Marking;
+
+/**
+ * Allows the creation of Petri nets with short one line ascii sketches, for example
+ * initialPlace -> [transition1] -> mp -> [transition2] -> finalPlace
+ *
+ * Larger nets can be created with multiple invocations. Existing nodes will be looked up by
+ * label.
+ *
+ * Weighted transitions without weights, as in {b}
, are defaulted to weight 1.0.
+ *
+ * Current limitations: no support for SPNs beyond weighted transitions. No support for separate
+ * nodes with duplicate labels.
+ *
+ * Methods for creating {@code AcceptingPetriNets} are also provided. These use naming
+ * conventions to identify initial and final markings per
+ * {@link #createAcceptingNet(String, String)}.
+ *
+ * Grammar
+ *
+ *
+ * PETRI_ONELINE_NET :: PLACE EDGE TRANSITION EDGE PLACE_LED_SUBNET
+ * PLACE_LED_SUBNET :: PLACE EDGE TRANSITION EDGE PLACE_LED_SUBNET
+ * PLACE_LED_SUBNET :: PLACE
+ * TRANSITION_SUBNET :: TRANSITION EDGE PLACE EDGE TRANSITION_SUBNET
+ * TRANSITION_SUBNET :: TRANSITION
+ * TRANSITION :: SIMPLE_TRANSITION || WEIGHTED_TRANSITION
+ * SIMPLE_TRANSITION :: '[' LABEL ']'
+ * WEIGHTED_TRANSITION :: WEIGHTED_TRAN_VALUE | WEIGHTED_TRAN_DEFAULT
+ * WEIGHTED_TRAN_VALUE :: '{' LABEL WEIGHT '}'
+ * WEIGHTED_TRAN_DEFAULT:: '{' LABEL '}'
+ * WEIGHT :: [0-9].[0-9]*
+ * PLACE :: LABEL
+ * EDGE :: '->'
+ * LABEL :: alphanumeric string
+ *
+ *
+ * Doesn't work for extended codepoints (eg UTF-16).
+ *
+ * @param netText
+ * @return
+ */
+public class PetriNetFragmentParser{
+
+ private static enum TokenInfo{
+ SIMPLE_TRANSITION("\\[[a-zA-Z][a-zA-Z0-9]*\\]"),
+ WEIGHTED_DEFAULT_TRANSITION("\\{[a-zA-Z][a-zA-Z0-9]*\\}"),
+ WEIGHTED_VALUE_TRANSITION("\\{[a-zA-Z][a-zA-Z0-9]*\\s[0-9]*\\.[0-9]*\\}"),
+ EDGE("->"),
+ PLACE("[a-zA-Z][a-zA-Z0-9]*"),
+ TERMINAL("");
+
+ public static final TokenInfo[] LEX_VALUES =
+ {SIMPLE_TRANSITION,WEIGHTED_DEFAULT_TRANSITION,WEIGHTED_VALUE_TRANSITION,EDGE,PLACE};
+
+ private Pattern pattern;
+
+ private TokenInfo(String regex){
+ this.pattern = Pattern.compile("^\\s*("+regex+")");
+ }
+
+
+ }
+
+ private static class Token{
+ public final PetriNetFragmentParser.TokenInfo tokenInfo;
+ public final String sequence;
+
+ public Token(PetriNetFragmentParser.TokenInfo token, String sequence) {
+ this.tokenInfo = token;
+ this.sequence = sequence;
+ }
+
+ public String toString() {
+ return sequence + ":" + tokenInfo;
+ }
+ }
+
+ public static final Set INITIAL_PLACE_LABELS;
+ public static final Set FINAL_PLACE_LABELS;
+
+ static {
+ INITIAL_PLACE_LABELS = new TreeSet<>();
+ INITIAL_PLACE_LABELS.add("Start");
+ INITIAL_PLACE_LABELS.add("Initial");
+ INITIAL_PLACE_LABELS.add("I");
+ FINAL_PLACE_LABELS = new TreeSet<>();
+ FINAL_PLACE_LABELS.add("End");
+ FINAL_PLACE_LABELS.add("Final");
+ FINAL_PLACE_LABELS.add("F");
+ }
+
+
+ private LinkedList tokens = new LinkedList();
+ private Token lookahead = null;
+ private StochasticNet net;
+ private Map nodeLookup = new HashMap<>();
+
+ public void addToNet(StochasticNet net, String netText) {
+ tokenize(netText);
+ this.net = net;
+ parse();
+ }
+
+ public StochasticNet createNet(String label, String netText) {
+ StochasticNet net = new StochasticNetImpl(label);
+ nodeLookup = new HashMap<>();
+ addToNet(net,netText);
+ return net;
+ }
+
+ /**
+ *
+ * Returns an AcceptingPetriNet with one initial and one final place marked. Initial and final
+ * markings are determined by labeling convention but will only be applied where places have the
+ * correct edge properties, ie, only outgoing for initial places, only incoming for final.
+ *
+ * Naming conventions for initial places, in order of checking: Start, Initial, I.
+ *
+ * Naming conventions for final places, in order of checking: End, Final, F.
+ *
+ * @param label
+ * @param netText
+ * @return
+ */
+ public AcceptingPetriNet createAcceptingNet(String label, String netText) {
+ StochasticNet net = new StochasticNetImpl(label);
+ nodeLookup = new HashMap<>();
+ addToNet(net,netText);
+ return markInitialFinalPlaces(net);
+ }
+
+
+ public AcceptingPetriNet markInitialFinalPlaces(StochasticNet net) {
+ Set initialCandidates = new TreeSet<>();
+ Set finalCandidates = new TreeSet<>();
+ for (Place place: net.getPlaces()) {
+ if ( INITIAL_PLACE_LABELS.contains(place.getLabel())
+ && net.getInEdges(place).isEmpty() )
+ {
+ initialCandidates.add(place);
+ }else {
+ if (FINAL_PLACE_LABELS.contains(place.getLabel())
+ && net.getOutEdges(place).isEmpty())
+ {
+ finalCandidates.add(place);
+ }
+ }
+ }
+ Marking initialMarking = markPlaceFromCandidates(initialCandidates, INITIAL_PLACE_LABELS);
+ Marking finalMarking = markPlaceFromCandidates(finalCandidates, FINAL_PLACE_LABELS);
+ return new AcceptingPetriNetImpl(net,initialMarking,finalMarking);
+ }
+
+ private Marking markPlaceFromCandidates(Set initialCandidates, Set identifyingLabels) {
+ Marking resultMarking = new Marking();
+ for (String initLabel: identifyingLabels) {
+ for (Place initPlace: initialCandidates) {
+ if (initLabel.equals(initPlace.getLabel())){
+ resultMarking.add(initPlace);
+ break;
+ }
+ }
+ }
+ return resultMarking;
+ }
+
+ public AcceptingPetriNet createAcceptingNetArgs(String label, String ... specs) {
+ if (specs.length == 0) {
+ throw new RuntimeException("Cannot create empty Accepting Petri Net");
+ }
+ AcceptingPetriNet anet = createAcceptingNet(label,specs[0]);
+ for (int i=1; iPetrinet in anet
is a
+ * StochasticNet
.
+ *
+ * @param anet
+ * @param netText
+ */
+ public void addToAcceptingNet(AcceptingPetriNet anet, String netText) {
+ net = (StochasticNet)anet.getNet();
+ addToNet(net,netText);
+ anet = markInitialFinalPlaces(net);
+ }
+
+ public StochasticNet createNetArgs(String label, String ... specs) {
+ if (specs.length == 0) {
+ return new StochasticNetImpl(label);
+ }
+ StochasticNet net = createNet(label, specs[0]);
+ for (int i=1; i expectedClass) {
+ PetrinetNode existing = nodeLookup.get(label);
+ if (existing != null)
+ if (!(expectedClass.isInstance(existing))) {
+ throw new RuntimeException("New node " + label + " duplicates existing node of wrong type");
+ }
+ return existing;
+ }
+
+
+ private void edge() {
+ if (lookahead.tokenInfo != TokenInfo.EDGE)
+ throw new RuntimeException("Expected ->, but found " + lookahead );
+ nextToken();
+ }
+
+
+ private Place place() {
+ String label = lookahead.sequence;
+ Place place = checkExistingPlace(label);
+ if (place == null) {
+ place = net.addPlace(label);
+ nodeLookup.put(label,place);
+ }
+ nextToken();
+ return place;
+ }
+
+ private void nextToken() {
+ tokens.pop();
+ // at the end of input we return an epsilon token
+ if (tokens.isEmpty())
+ lookahead = new Token(TokenInfo.TERMINAL, "");
+ else
+ lookahead = tokens.getFirst();
+ }
+
+}
\ No newline at end of file
diff --git a/src/au/edu/qut/prom/helpers/PetrinetExportUtils.java b/src/au/edu/qut/prom/helpers/PetrinetExportUtils.java
new file mode 100644
index 0000000..f32613e
--- /dev/null
+++ b/src/au/edu/qut/prom/helpers/PetrinetExportUtils.java
@@ -0,0 +1,122 @@
+package au.edu.qut.prom.helpers;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.processmining.models.graphbased.directed.petrinet.Petrinet;
+import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge;
+import org.processmining.models.graphbased.directed.petrinet.PetrinetNode;
+import org.processmining.models.graphbased.directed.petrinet.StochasticNet;
+import org.processmining.models.graphbased.directed.petrinet.StochasticNet.DistributionType;
+import org.processmining.models.graphbased.directed.petrinet.elements.Place;
+import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition;
+import org.processmining.models.graphbased.directed.petrinet.elements.Transition;
+import org.processmining.plugins.stochasticpetrinet.StochasticNetUtils;
+
+/**
+ * Utility methods for exporting petri nets in visualization formats.
+ *
+ * Originally adapted from
+ * org.processmining.models.graphbased.directed.petrinet.impl.ToStochasticNet
.
+ *
+ * @author burkeat
+ *
+ */
+public class PetrinetExportUtils {
+
+ private static final double EPSILON = 0.0001;
+ private static final String LINE_SEP = "\n";
+
+ private static int checkId(PetrinetNode node, Map idMapping, int currentCounter) {
+ if (!idMapping.containsKey(node)) {
+ idMapping.put(node, String.valueOf("id" + (currentCounter++)));
+ }
+ return currentCounter;
+ }
+
+
+ /**
+ * Originally adopted from exportPN2DOT method from the EventToActivityMatcher plugin
+ *
+ * @param net
+ * @author Thomas Baier, Andreas Rogge-Solti
+ */
+ public static String convertPetrinetToDOT(Petrinet net) {
+ String lsep = System.getProperty("line.separator");
+
+ String resultString = "digraph G { " + lsep;
+ resultString += "ranksep=\".3\"; fontsize=\"14\"; remincross=true; margin=\"0.0,0.0\"; fontname=\"Arial\";rankdir=\"LR\";" + lsep;
+ resultString += "edge [arrowsize=\"0.5\"];\n";
+ resultString += "node [height=\".2\",width=\".2\",fontname=\"Arial\",fontsize=\"14\"];\n";
+ resultString += "ratio=0.4;" + lsep;
+
+ Map idMapping = new HashMap<>();
+ int id = 1;
+ for (Transition tr : net.getTransitions()) {
+ String label = tr.getLabel();
+ String shape = "shape=\"box\"";
+ if (tr instanceof TimedTransition) {
+ TimedTransition tt = (TimedTransition) tr;
+ label += "\\n" + StochasticNetUtils.printDistribution(tt.getDistribution());
+ if (tt.getDistributionType().equals(DistributionType.IMMEDIATE)) {
+ shape += ",margin=\"0, 0.1\"";
+ }
+ double weight = tt.getWeight();
+ if (weight > 0.0d ) {
+ if ( Math.abs( Math.round(weight) - weight ) < EPSILON ){
+ label += "\\n" + String.format("%d", Math.round(weight));
+ }else {
+ label += "\\n" + String.format("%.3f", weight);
+ }
+ }
+ }
+ if (tr.isInvisible()) {
+ shape += ",color=\"black\",fontcolor=\"white\"";
+ }
+ id = checkId(tr, idMapping, id);
+ resultString += idMapping.get(tr) + " [" + shape + ",label=\"" + label + "\",style=\"filled\"];" + lsep;
+ }
+
+
+ // Places
+ for (Place place : net.getPlaces()) {
+ id = checkId(place, idMapping, id);
+ resultString += idMapping.get(place) + " [shape=\"circle\",label=\"\"];" + lsep;
+ }
+
+ // Edges
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge : net.getEdges()) {
+ id = checkId(edge.getSource(), idMapping, id);
+ id = checkId(edge.getTarget(), idMapping, id);
+
+ String edgeString = idMapping.get(edge.getSource()) + " -> " + idMapping.get(edge.getTarget());
+ resultString += edgeString + lsep;
+ }
+
+ resultString += "}";
+
+ return resultString;
+ }
+
+ /**
+ * Small files only - reads in one hit
+ *
+ * @param filePath
+ * @return
+ * @throws IOException
+ */
+ public static String petriNetFragmentToDOT(String filePath) throws IOException
+ {
+ byte[] encoded = Files.readAllBytes( Paths.get( filePath ));
+ String text = new String(encoded,StandardCharsets.UTF_8);
+ PetriNetFragmentParser parser = new PetriNetFragmentParser();
+ String[] lines = text.split(LINE_SEP);
+ StochasticNet net = parser.createNetArgs(filePath, lines);
+ return convertPetrinetToDOT(net);
+ }
+
+}
diff --git a/src/au/edu/qut/prom/helpers/StochasticPetriNetUtils.java b/src/au/edu/qut/prom/helpers/StochasticPetriNetUtils.java
new file mode 100644
index 0000000..9c28bcb
--- /dev/null
+++ b/src/au/edu/qut/prom/helpers/StochasticPetriNetUtils.java
@@ -0,0 +1,276 @@
+package au.edu.qut.prom.helpers;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.processmining.models.graphbased.directed.AbstractDirectedGraph;
+import org.processmining.models.graphbased.directed.petrinet.Petrinet;
+import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge;
+import org.processmining.models.graphbased.directed.petrinet.PetrinetNode;
+import org.processmining.models.graphbased.directed.petrinet.StochasticNet;
+import org.processmining.models.graphbased.directed.petrinet.elements.Place;
+import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition;
+import org.processmining.models.graphbased.directed.petrinet.elements.Transition;
+import org.processmining.models.graphbased.directed.utils.GraphIterator;
+import org.processmining.models.graphbased.directed.utils.GraphIterator.EdgeAcceptor;
+import org.processmining.models.graphbased.directed.utils.GraphIterator.NodeAcceptor;
+import org.processmining.models.semantics.petrinet.Marking;
+
+public class StochasticPetriNetUtils {
+
+ private static Logger LOGGER = LogManager.getLogger();
+
+ private static final double EPSILON = 0.00001d;
+
+ public static boolean areEqual(StochasticNet o1, StochasticNet o2) {
+ if (o1 == null && o2 == null)
+ return true;
+ if (o1 == null || o2 == null)
+ return false;
+ Map o1PlaceMap = o1.getPlaces().stream().collect(
+ Collectors.toMap(Place::getLabel, Function.identity()));
+ for (Place p: o2.getPlaces()) {
+ Place o1Place = o1PlaceMap.get(p.getLabel());
+ if (!areEqual(o1Place,p)) {
+ LOGGER.debug("Not equal: places {} != {}",p, o1Place);
+ return false;
+ }
+ }
+ Map o1TransitionMap = o1.getTransitions().stream().collect(
+ Collectors.toMap(Transition::getLabel, Function.identity()));
+ for (Transition t: o2.getTransitions()) {
+ Transition o1Transition = o1TransitionMap.get(t.getLabel());
+ if (o1Transition instanceof TimedTransition) {
+ if (!areEqual((TimedTransition)o1Transition,(TimedTransition)t)) {
+ LOGGER.debug("Not equal: transitions {} != {}",t,o1Transition);
+ return false;
+ }
+ }else {
+ if (!areEqual(o1Transition,t))
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public static boolean areEqual(TimedTransition t1, TimedTransition t2) {
+ if (t1 == null && t2 == null)
+ return true;
+ if (t1 == null || t2 == null)
+ return false;
+
+ if ( Math.abs( t1.getWeight() - t2.getWeight() ) > EPSILON
+ || (!t1.getDistributionType().equals(t2.getDistributionType() ) ) )
+ {
+ return false;
+ }
+ return areEqual( (PetrinetNode)t1, t2);
+
+ }
+
+ public static boolean areEqual(PetrinetNode p1, PetrinetNode p2) {
+ if (p1 == null && p2 == null)
+ return true;
+ if (p1 == null || p2 == null)
+ return false;
+ if (! p1.getLabel().equals(p2.getLabel()))
+ return false;
+ return areEqual( p1.getGraph().getInEdges(p1),
+ p2.getGraph().getInEdges(p2));
+
+ }
+
+
+ public static boolean areEqual(Collection> edges1,
+ Collection> edges2) {
+ if (edges1.size() != edges2.size())
+ return false;
+ Map edgeMap1 = edges1.stream().collect(
+ Collectors.toMap(p -> p.getSource().getLabel(),
+ p -> p.getTarget().getLabel()) );
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge: edges2) {
+ String target = edgeMap1.get(edge.getSource().getLabel());
+ if (! target.equals( edge.getTarget().getLabel() ) )
+ return false;
+ }
+ return true;
+ }
+
+ public static Collection findAllSuccessors(Transition transition) {
+
+ final NodeAcceptor nodeAcceptor = new NodeAcceptor() {
+ public boolean acceptNode(PetrinetNode node, int depth) {
+ return ((depth != 0) && (node instanceof Transition) );
+ }
+ };
+
+ Collection transitions = GraphIterator.getDepthFirstSuccessors(transition, transition.getGraph(),
+ new EdgeAcceptor>() {
+
+ public boolean acceptEdge(PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge,
+ int depth) {
+ return !nodeAcceptor.acceptNode(edge.getSource(), depth);
+ }
+ }, nodeAcceptor);
+
+ return Arrays.asList(transitions.toArray(new Transition[0]));
+ }
+
+ public static Collection findAllPredecessors(Transition transition) {
+
+ final NodeAcceptor nodeAcceptor = new NodeAcceptor() {
+ public boolean acceptNode(PetrinetNode node, int depth) {
+ return ((depth != 0) && (node instanceof Transition) );
+ }
+ };
+
+ Collection transitions = GraphIterator.getDepthFirstPredecessors(transition,
+ transition.getGraph(),
+ new EdgeAcceptor>() {
+
+ public boolean acceptEdge(PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge,
+ int depth) {
+ return !nodeAcceptor.acceptNode(edge.getTarget(), depth);
+ }
+ }, nodeAcceptor);
+
+ return Arrays.asList(transitions.toArray(new Transition[0]));
+ }
+
+
+ public static Collection findAllSiblings(Transition transition) {
+ Collection transitions = new HashSet();
+ AbstractDirectedGraph> net =
+ transition.getGraph();
+ Collection> incomingEdges =
+ net.getInEdges(transition);
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge: incomingEdges) {
+ Collection> incomingSiblingEdges = net.getOutEdges(edge.getSource());
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edgeSibling: incomingSiblingEdges) {
+ transitions.add( edgeSibling.getTarget() );
+ }
+ }
+ return Arrays.asList(transitions.toArray(new Transition[0]));
+ }
+
+ public static Collection predecessors(Place place) {
+ Collection result = new HashSet();
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge: place.getGraph().getInEdges(place)) {
+ result.add((Transition)edge.getSource());
+ }
+ return result;
+ }
+
+ public static Collection successors(Place place) {
+ Collection result = new HashSet();
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge: place.getGraph().getOutEdges(place)) {
+ result.add((Transition)edge.getTarget());
+ }
+ return result;
+ }
+
+ public static Collection predecessors(Transition transition) {
+ Collection result = new HashSet<>();
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge: transition.getGraph().getInEdges(transition)) {
+ result.add((Place)edge.getSource());
+ }
+ return result;
+ }
+
+ public static Collection successors(Transition transition) {
+ Collection result = new HashSet<>();
+ for (PetrinetEdge extends PetrinetNode, ? extends PetrinetNode> edge: transition.getGraph().getOutEdges(transition)) {
+ result.add((Place)edge.getTarget());
+ }
+ return result;
+ }
+
+ /**
+ * Pre: unique label equivalence for places between the two nets. This is needed because
+ * a number of valid and interesting nets don't have input places with no incoming edges, ie,
+ * they are not WorkflowNets. InductiveMiner and other miners can produce such nets under
+ * important edge cases.
+ *
+ * An earlier version of this checked for net.getInEdges(newPlace).isEmpty()
.
+ *
+ * @param initialMarking
+ * @param net
+ * @return
+ */
+ public static Marking findEquivalentInitialMarking(Marking initialMarking, StochasticNet net) {
+ Marking newMarking = new Marking();
+ for (Place oldPlace: initialMarking) {
+ for (Place newPlace: net.getPlaces()) {
+ if (oldPlace.getLabel().equals(newPlace.getLabel() )) {
+ newMarking.add(newPlace);
+ return newMarking;
+ }
+ }
+ }
+ return newMarking;
+ }
+
+ public static Set findEquivalentFinalMarkings(Set finalMarkings, StochasticNet net) {
+ Set newMarkings = new HashSet<>();
+ for (Marking finalMarking: finalMarkings) {
+ Marking newMarking = new Marking();
+ for (Place oldPlace: finalMarking) {
+ for (Place newPlace: net.getPlaces()) {
+ if (oldPlace.getLabel().equals(newPlace.getLabel())
+ && net.getOutEdges(newPlace).isEmpty() ) {
+ newMarking.add(newPlace);
+ }
+ }
+ }
+ newMarkings.add(newMarking);
+ }
+ return newMarkings;
+ }
+
+ /**
+ * StochasticPetriNet2StochasticDeterministicFiniteAutomatonPlugin.guessInitialMarking() by Leemans
+ *
+ * @param net
+ * @return
+ */
+ public static Marking guessInitialMarking(Petrinet net) {
+ Marking result = new Marking();
+ for (Place p : net.getPlaces()) {
+ if (net.getInEdges(p).isEmpty()) {
+ result.add(p);
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Health warning - this simply finds places with only incoming arcs. It will behave
+ * for sound workflow nets, but may be quite different from the set of all possible final
+ * markings given particular initial markings.
+ *
+ * @param finalMarkings
+ * @param net
+ * @return
+ */
+ public static Set guessFinalMarkingsAsIfJustFinalPlaces(Petrinet net) {
+ Set newMarkings = new HashSet<>();
+ Marking newMarking = new Marking();
+ for (Place place: net.getPlaces()) {
+ if ( net.getOutEdges(place).isEmpty() &&
+ !net.getInEdges(place).isEmpty() )
+ {
+ newMarking.add(place);
+ }
+ }
+ newMarkings.add(newMarking);
+ return newMarkings;
+ }
+
+}
diff --git a/src/au/edu/qut/stochasticweightestimationplugins/EstimatorPluginConfiguration.java b/src/au/edu/qut/stochasticweightestimationplugins/EstimatorPluginConfiguration.java
new file mode 100644
index 0000000..612598b
--- /dev/null
+++ b/src/au/edu/qut/stochasticweightestimationplugins/EstimatorPluginConfiguration.java
@@ -0,0 +1,106 @@
+package au.edu.qut.stochasticweightestimationplugins;
+
+import java.awt.GridBagConstraints;
+import java.awt.GridBagLayout;
+
+import javax.swing.JComboBox;
+import javax.swing.JComponent;
+import javax.swing.JFrame;
+import javax.swing.JLabel;
+import javax.swing.JPanel;
+
+import org.deckfour.xes.model.XLog;
+import org.processmining.plugins.InductiveMiner.ClassifierChooser;
+
+import au.edu.qut.pm.spn_estimator.ActivityPairLHEstimator;
+import au.edu.qut.pm.spn_estimator.ActivityPairRHEstimator;
+import au.edu.qut.pm.spn_estimator.AlignmentEstimator;
+import au.edu.qut.pm.spn_estimator.BillClintonEstimator;
+import au.edu.qut.pm.spn_estimator.FrequencyEstimator;
+import au.edu.qut.pm.spn_estimator.LogSourcedWeightEstimator;
+import au.edu.qut.pm.spn_estimator.MeanScaledActivityPairRHEstimator;
+import au.edu.qut.xes.helpers.DelimitedTraceToXESConverter;
+
+/**
+ *
+ * @author burkeat
+ *
+ */
+public class EstimatorPluginConfiguration extends JPanel {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final String[] ESTIMATOR_LABELS;
+ private static final LogSourcedWeightEstimator[] ESTIMATORS;
+
+ private static final int DEFAULT_ESTIMATOR = 3;
+ static {
+ ESTIMATOR_LABELS = new String[6];
+ ESTIMATOR_LABELS[0] = "Frequency Estimator";
+ ESTIMATOR_LABELS[1] = "LH Activity-Pair Estimator";
+ ESTIMATOR_LABELS[2] = "RH Activity-Pair Estimator";
+ ESTIMATOR_LABELS[3] = "Scaled RH Activity-Pair Estimator";
+ ESTIMATOR_LABELS[4] = "Fork Distributed Estimator";
+ ESTIMATOR_LABELS[5] = "Alignment Estimator";
+ ESTIMATORS = new LogSourcedWeightEstimator[6];
+ ESTIMATORS[0] = new FrequencyEstimator();
+ ESTIMATORS[1] = new ActivityPairLHEstimator();
+ ESTIMATORS[2] = new ActivityPairRHEstimator();
+ ESTIMATORS[3] = new MeanScaledActivityPairRHEstimator();
+ ESTIMATORS[4] = new BillClintonEstimator();
+ ESTIMATORS[5] = new AlignmentEstimator();
+ }
+
+ private JComboBox estimatorComboBox ;
+
+ public EstimatorPluginConfiguration(XLog log) {
+ super(new GridBagLayout());
+ GridBagConstraints constraints = new GridBagConstraints();
+ final JLabel estimatorLabel = new JLabel("Weight Estimator");
+ constraints.gridx = 0; constraints.gridy = 0; constraints.ipadx = 10; constraints.anchor = GridBagConstraints.LINE_START;
+ add(estimatorLabel, constraints);
+ estimatorComboBox = new JComboBox(ESTIMATOR_LABELS);
+ constraints.gridx = 2; constraints.gridy = 0; constraints.anchor = GridBagConstraints.LINE_END;
+ add(estimatorComboBox, constraints);
+ final JLabel classifierLabel = new JLabel("Event Classifier");
+ constraints.gridx = 0; constraints.gridy = 1; constraints.ipadx = 10; constraints.anchor = GridBagConstraints.LINE_START;
+ add(classifierLabel, constraints);
+ ClassifierChooser classifierChooser = new ClassifierChooser(log);
+ constraints.gridx = 2; constraints.gridy = 1; constraints.anchor = GridBagConstraints.LINE_END;
+ add(classifierChooser, constraints);
+ }
+
+ public LogSourcedWeightEstimator getEstimator() {
+ int selection = estimatorComboBox.getSelectedIndex();
+ if (selection >= 0) {
+ return ESTIMATORS[selection];
+ }
+ return ESTIMATORS[DEFAULT_ESTIMATOR];
+ }
+
+
+ // Test method
+ private static void createAndShowGUI() {
+ JFrame frame = new JFrame("Test");
+ frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ //Create and set up the content pane.
+ XLog log = new DelimitedTraceToXESConverter().convertTextArgs("a b","b c");
+ JComponent newContentPane = new EstimatorPluginConfiguration(log);
+ newContentPane.setOpaque(true); //content panes must be opaque
+ frame.setContentPane(newContentPane);
+ //Display the window.
+ frame.pack();
+ frame.setVisible(true);
+ }
+
+ // Test method
+ public static void main(String[] args) {
+ javax.swing.SwingUtilities.invokeLater(new Runnable() {
+ public void run() {
+ createAndShowGUI();
+ }
+ });
+ }
+
+
+}
diff --git a/src/au/edu/qut/stochasticweightestimationplugins/SPDWeightEstimatorPlugin.java b/src/au/edu/qut/stochasticweightestimationplugins/SPDWeightEstimatorPlugin.java
new file mode 100644
index 0000000..1373c2c
--- /dev/null
+++ b/src/au/edu/qut/stochasticweightestimationplugins/SPDWeightEstimatorPlugin.java
@@ -0,0 +1,139 @@
+package au.edu.qut.stochasticweightestimationplugins;
+
+import org.deckfour.uitopia.api.event.TaskListener.InteractionResult;
+import org.deckfour.xes.classification.XEventClassifier;
+import org.deckfour.xes.classification.XEventNameClassifier;
+import org.deckfour.xes.model.XLog;
+import org.processmining.acceptingpetrinet.models.AcceptingPetriNet;
+import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetImpl;
+import org.processmining.contexts.uitopia.UIPluginContext;
+import org.processmining.contexts.uitopia.annotations.UITopiaVariant;
+import org.processmining.framework.packages.PackageManager.Canceller;
+import org.processmining.framework.plugin.PluginContext;
+import org.processmining.framework.plugin.annotations.Plugin;
+import org.processmining.framework.plugin.annotations.PluginLevel;
+import org.processmining.framework.plugin.annotations.PluginVariant;
+import org.processmining.models.graphbased.directed.petrinet.Petrinet;
+import org.processmining.models.graphbased.directed.petrinet.StochasticNet;
+import org.processmining.models.semantics.petrinet.Marking;
+import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTree;
+import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTreeReduce.ReductionFailedException;
+import org.processmining.plugins.InductiveMiner.efficienttree.UnknownTreeNodeException;
+import org.processmining.plugins.inductiveminer2.logs.IMLog;
+import org.processmining.plugins.inductiveminer2.mining.InductiveMiner;
+import org.processmining.plugins.inductiveminer2.plugins.InductiveMinerPlugin;
+import org.processmining.plugins.inductiveminer2.variants.MiningParametersIMInfrequent;
+
+import au.edu.qut.pm.spn_estimator.LogSourcedWeightEstimator;
+import au.edu.qut.pm.spn_estimator.MeanScaledActivityPairRHEstimator;
+import au.edu.qut.prom.helpers.StochasticPetriNetUtils;
+
+/**
+ * ProM plugin that produces a Generalized Stochastic Petri Net (GSPN) by
+ * estimating transition weights from an event log. The control-flow model is
+ * either supplied by the caller or mined first with the Inductive Miner.
+ */
+@Plugin(name = "Mine Stochastic Petri net with estimators",
+        level = PluginLevel.Regular,
+        returnLabels = {"GSPN" },
+        returnTypes = { StochasticNet.class },
+        parameterLabels = { "Log", "Petri Net" },
+        userAccessible = true,
+        help = SPDWeightEstimatorPlugin.TEXT)
+public class SPDWeightEstimatorPlugin {
+
+    public static final String AFFILIATION = "Queensland University of Technology";
+    public static final String AUTHOR = "Adam Burke, Sander Leemans, Moe Thandar Wynn";
+    public static final String EMAIL = "at.burke@qut.edu.au";
+    public static final String TEXT = "Produce a GSPN with immediate transitions from an input log and Petri Net control model.\n"
+            + "The algorithms implemented here are detailed in \n"
+            + "Burke, Leemans and Wynn - Stochastic Process Discovery By Weight Estimation (2020)";
+
+    private static final String DEFAULT_MINER = "Inductive Miner";
+    private static final String DEFAULT_ESTIMATOR = "Mean-Scaled RH Activity-Pair Estimator";
+
+    // Default classifier: events are identified by their concept:name only.
+    private static XEventNameClassifier defaultClassifier() {
+        return new XEventNameClassifier();
+    }
+
+    private static MeanScaledActivityPairRHEstimator defaultEstimator() {
+        return new MeanScaledActivityPairRHEstimator();
+    }
+
+    private static String getDefaultMinerName() {
+        return DEFAULT_MINER;
+    }
+
+    /**
+     * Mine a stochastic net from a log with the default miner and estimator.
+     * Any exception is logged to the plugin context and null is returned.
+     */
+    @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL,
+            uiLabel = "Mine Stochastic Petri net from log with estimator",
+            uiHelp = "Use " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER + ". " + TEXT)
+    // Fixed: label previously ended with a stray ")".
+    @PluginVariant(variantLabel = "Mine Stochastic Petri net from log with " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER,
+            requiredParameterLabels = {0})
+    public static StochasticNet mineSPNFromLogWithDefaults(final PluginContext context, XLog log) {
+        try {
+            return mineSPNFromLogWithEstimator(context, log,
+                    defaultEstimator(), defaultClassifier());
+        } catch (Exception e) {
+            context.log(e);
+        }
+        return null;
+    }
+
+    /**
+     * Interactive variant: asks the user to pick an estimator, then estimates
+     * weights for the supplied Petri net. Returns null if the dialog is
+     * cancelled (the plugin result future is cancelled as well).
+     */
+    @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL,
+            uiHelp="Mine Stochastic Petri net with selected estimator. " + TEXT)
+    // Fixed: keep the label short; the long help text belongs in uiHelp only.
+    @PluginVariant(variantLabel = "Mine Stochastic Petri net with selected estimator",
+            requiredParameterLabels = {0, 1} )
+    public static StochasticNet mineGUISPNWithEstimator(final UIPluginContext context, XLog log, Petrinet pnet ) {
+        EstimatorPluginConfiguration estConfig = new EstimatorPluginConfiguration(log);
+        InteractionResult interaction = context.showConfiguration("Configure stochastic weight estimation", estConfig );
+        if (interaction != InteractionResult.CONTINUE) {
+            context.getFutureResult(0).cancel(false);
+            return null;
+        }
+        LogSourcedWeightEstimator estimator = estConfig.getEstimator();
+        // No marking is supplied by the GUI, so guess one from the net structure.
+        Marking initialMarking = StochasticPetriNetUtils.guessInitialMarking(pnet);
+        AcceptingPetriNet apnet = new AcceptingPetriNetImpl(pnet, initialMarking);
+        StochasticNet resultNet = mineSPNWithEstimator(context, apnet, log, estimator, defaultClassifier());
+        return resultNet;
+    }
+
+    /**
+     * Estimate weights for an existing accepting Petri net against the log.
+     */
+    public static StochasticNet mineSPNWithEstimator(final PluginContext context, AcceptingPetriNet apnet, XLog log,
+            LogSourcedWeightEstimator estimator, XEventClassifier classifier)
+    {
+        context.log("Mining with estimator " + estimator.getReadableID() + "...");
+        StochasticNet resultNet = estimator.estimateWeights(apnet, log, classifier);
+        return resultNet;
+    }
+
+    /**
+     * Mine a control-flow model from the log with the default miner, then
+     * estimate weights for it with the given estimator.
+     */
+    public static StochasticNet mineSPNFromLogWithEstimator(final PluginContext context, XLog log,
+            LogSourcedWeightEstimator estimator, XEventClassifier classifier)
+        throws Exception
+    {
+        context.log("Mining control flow from log with " + getDefaultMinerName());
+        AcceptingPetriNet apnet = mineWithDefaultMiner(context, log, classifier);
+        context.log("Mining with estimator " + estimator.getReadableID() + "...");
+        StochasticNet resultNet = estimator.estimateWeights(apnet, log, classifier);
+        return resultNet;
+    }
+
+    /**
+     * Run the Inductive Miner (infrequent variant, default parameters) on the
+     * log and convert the resulting process tree to an accepting Petri net.
+     * Cancellation is delegated to the context's progress monitor.
+     */
+    public static AcceptingPetriNet mineWithDefaultMiner(final PluginContext context, XLog log, XEventClassifier classifier)
+            throws UnknownTreeNodeException, ReductionFailedException
+    {
+        context.log("Using classifier " + classifier.getClass());
+        MiningParametersIMInfrequent parameters = new MiningParametersIMInfrequent();
+        parameters.setClassifier(classifier);
+        IMLog imlog = parameters.getIMLog(log);
+        context.log("Starting inductive miner ...");
+        EfficientTree tree = InductiveMiner.mineEfficientTree(imlog, parameters, new Canceller() {
+            public boolean isCancelled() {
+                return context.getProgress().isCancelled();
+            }
+        });
+        AcceptingPetriNet pnet = InductiveMinerPlugin.postProcessTree2PetriNet(tree, new Canceller() {
+            public boolean isCancelled() {
+                return context.getProgress().isCancelled();
+            }
+        });
+        return pnet;
+    }
+
+}
diff --git a/src/au/edu/qut/stochasticweightestimationplugins/StochasticWeightEstimatorPlugin.java b/src/au/edu/qut/stochasticweightestimationplugins/StochasticWeightEstimatorPlugin.java
new file mode 100644
index 0000000..083ce53
--- /dev/null
+++ b/src/au/edu/qut/stochasticweightestimationplugins/StochasticWeightEstimatorPlugin.java
@@ -0,0 +1,138 @@
+package au.edu.qut.stochasticweightestimationplugins;
+
+import org.deckfour.uitopia.api.event.TaskListener.InteractionResult;
+import org.deckfour.xes.classification.XEventClassifier;
+import org.deckfour.xes.classification.XEventNameClassifier;
+import org.deckfour.xes.model.XLog;
+import org.processmining.acceptingpetrinet.models.AcceptingPetriNet;
+import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetImpl;
+import org.processmining.contexts.uitopia.UIPluginContext;
+import org.processmining.contexts.uitopia.annotations.UITopiaVariant;
+import org.processmining.framework.packages.PackageManager.Canceller;
+import org.processmining.framework.plugin.PluginContext;
+import org.processmining.framework.plugin.annotations.Plugin;
+import org.processmining.framework.plugin.annotations.PluginLevel;
+import org.processmining.framework.plugin.annotations.PluginVariant;
+import org.processmining.models.graphbased.directed.petrinet.Petrinet;
+import org.processmining.models.semantics.petrinet.Marking;
+import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTree;
+import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTreeReduce.ReductionFailedException;
+import org.processmining.plugins.InductiveMiner.efficienttree.UnknownTreeNodeException;
+import org.processmining.plugins.inductiveminer2.logs.IMLog;
+import org.processmining.plugins.inductiveminer2.mining.InductiveMiner;
+import org.processmining.plugins.inductiveminer2.plugins.InductiveMinerPlugin;
+import org.processmining.plugins.inductiveminer2.variants.MiningParametersIMInfrequent;
+import org.processmining.slpnminer.models.StochasticNetImpl;
+
+import au.edu.qut.pm.spn_estimator.LogSourcedWeightEstimator;
+import au.edu.qut.pm.spn_estimator.MeanScaledActivityPairRHEstimator;
+import au.edu.qut.prom.helpers.StochasticPetriNetUtils;
+
+/**
+ * ProM plugin variant of SPDWeightEstimatorPlugin that returns the concrete
+ * slpnminer StochasticNetImpl (obtained via estimator.getResult()) instead of
+ * the StochasticNet interface.
+ */
+@Plugin(name = "Disc Stochastic Weight with Estimator",
+        level = PluginLevel.Regular,
+        returnLabels = {"GSPN" },
+        returnTypes = { StochasticNetImpl.class },
+        parameterLabels = { "Log", "Petri Net" },
+        userAccessible = true,
+        help = SPDWeightEstimatorPlugin.TEXT)
+public class StochasticWeightEstimatorPlugin {
+
+    public static final String AFFILIATION = "Queensland University of Technology";
+    public static final String AUTHOR = "Adam Burke, Sander Leemans, Moe Thandar Wynn";
+    public static final String EMAIL = "at.burke@qut.edu.au";
+    public static final String TEXT = "Produce a GSPN with immediate transitions from an input log and Petri Net control model.\n"
+            + "The algorithms implemented here are detailed in \n"
+            + "Burke, Leemans and Wynn - Stochastic Process Discovery By Weight Estimation (2020)";
+
+    private static final String DEFAULT_MINER = "Inductive Miner";
+    private static final String DEFAULT_ESTIMATOR = "Mean-Scaled RH Activity-Pair Estimator";
+
+    // Default classifier: events are identified by their concept:name only.
+    private static XEventNameClassifier defaultClassifier() {
+        return new XEventNameClassifier();
+    }
+
+    private static MeanScaledActivityPairRHEstimator defaultEstimator() {
+        return new MeanScaledActivityPairRHEstimator();
+    }
+
+    private static String getDefaultMinerName() {
+        return DEFAULT_MINER;
+    }
+
+    /**
+     * Mine a stochastic net from a log with the default miner and estimator.
+     * Any exception is logged to the plugin context and null is returned.
+     */
+    @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL,
+            uiLabel = "Mine Stochastic Petri net from log with estimator",
+            uiHelp = "Use " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER + ". " + TEXT)
+    // Fixed: label previously ended with a stray ")".
+    @PluginVariant(variantLabel = "Mine Stochastic Petri net from log with " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER,
+            requiredParameterLabels = {0})
+    public static StochasticNetImpl mineSPNFromLogWithDefaults(final PluginContext context, XLog log) {
+        try {
+            return mineSPNFromLogWithEstimator(context, log,
+                    defaultEstimator(), defaultClassifier());
+        } catch (Exception e) {
+            context.log(e);
+        }
+        return null;
+    }
+
+    /**
+     * Interactive variant: asks the user to pick an estimator, then estimates
+     * weights for the supplied Petri net. Returns null if the dialog is
+     * cancelled (the plugin result future is cancelled as well).
+     */
+    @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL,
+            uiHelp="Mine Stochastic Petri net with selected estimator. " + TEXT)
+    // Fixed: keep the label short; the long help text belongs in uiHelp only.
+    @PluginVariant(variantLabel = "Mine Stochastic Petri net with selected estimator",
+            requiredParameterLabels = {0, 1} )
+    public static StochasticNetImpl mineGUISPNWithEstimator(final UIPluginContext context, XLog log, Petrinet pnet ) {
+        EstimatorPluginConfiguration estConfig = new EstimatorPluginConfiguration(log);
+        InteractionResult interaction = context.showConfiguration("Configure stochastic weight estimation", estConfig );
+        if (interaction != InteractionResult.CONTINUE) {
+            context.getFutureResult(0).cancel(false);
+            return null;
+        }
+        LogSourcedWeightEstimator estimator = estConfig.getEstimator();
+        // No marking is supplied by the GUI, so guess one from the net structure.
+        Marking initialMarking = StochasticPetriNetUtils.guessInitialMarking(pnet);
+        AcceptingPetriNet apnet = new AcceptingPetriNetImpl(pnet, initialMarking);
+        StochasticNetImpl resultNet = mineSPNWithEstimator(context, apnet, log, estimator, defaultClassifier());
+        return resultNet;
+    }
+
+    /**
+     * Estimate weights for an existing accepting Petri net against the log.
+     */
+    public static StochasticNetImpl mineSPNWithEstimator(final PluginContext context, AcceptingPetriNet apnet, XLog log,
+            LogSourcedWeightEstimator estimator, XEventClassifier classifier)
+    {
+        context.log("Mining with estimator " + estimator.getReadableID() + "...");
+        estimator.estimateWeights(apnet, log, classifier);
+        return estimator.getResult();
+    }
+
+    /**
+     * Mine a control-flow model from the log with the default miner, then
+     * estimate weights for it with the given estimator. Progress messages are
+     * logged for consistency with SPDWeightEstimatorPlugin.
+     */
+    public static StochasticNetImpl mineSPNFromLogWithEstimator(final PluginContext context, XLog log,
+            LogSourcedWeightEstimator estimator, XEventClassifier classifier)
+        throws Exception
+    {
+        context.log("Mining control flow from log with " + getDefaultMinerName());
+        AcceptingPetriNet apnet = mineWithDefaultMiner(context, log, classifier);
+        context.log("Mining with estimator " + estimator.getReadableID() + "...");
+        estimator.estimateWeights(apnet, log, classifier);
+        StochasticNetImpl resultNet = estimator.getResult();
+        return resultNet;
+    }
+
+    /**
+     * Run the Inductive Miner (infrequent variant, default parameters) on the
+     * log and convert the resulting process tree to an accepting Petri net.
+     * Cancellation is delegated to the context's progress monitor.
+     */
+    public static AcceptingPetriNet mineWithDefaultMiner(final PluginContext context, XLog log, XEventClassifier classifier)
+            throws UnknownTreeNodeException, ReductionFailedException
+    {
+        context.log("Using classifier " + classifier.getClass());
+        MiningParametersIMInfrequent parameters = new MiningParametersIMInfrequent();
+        parameters.setClassifier(classifier);
+        IMLog imlog = parameters.getIMLog(log);
+        context.log("Starting inductive miner ...");
+        EfficientTree tree = InductiveMiner.mineEfficientTree(imlog, parameters, new Canceller() {
+            public boolean isCancelled() {
+                return context.getProgress().isCancelled();
+            }
+        });
+        AcceptingPetriNet pnet = InductiveMinerPlugin.postProcessTree2PetriNet(tree, new Canceller() {
+            public boolean isCancelled() {
+                return context.getProgress().isCancelled();
+            }
+        });
+        return pnet;
+    }
+
+}
diff --git a/src/au/edu/qut/xes/helpers/DelimitedTraceToXESConverter.java b/src/au/edu/qut/xes/helpers/DelimitedTraceToXESConverter.java
new file mode 100644
index 0000000..e13e5bf
--- /dev/null
+++ b/src/au/edu/qut/xes/helpers/DelimitedTraceToXESConverter.java
@@ -0,0 +1,139 @@
+package au.edu.qut.xes.helpers;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+import org.deckfour.xes.model.XAttribute;
+import org.deckfour.xes.model.XAttributeMap;
+import org.deckfour.xes.model.XEvent;
+import org.deckfour.xes.model.XLog;
+import org.deckfour.xes.model.XTrace;
+import org.deckfour.xes.model.impl.XAttributeLiteralImpl;
+import org.deckfour.xes.model.impl.XAttributeMapImpl;
+import org.deckfour.xes.model.impl.XEventImpl;
+import org.deckfour.xes.model.impl.XLogImpl;
+import org.deckfour.xes.model.impl.XTraceImpl;
+
+/**
+ * Helper methods for converting a simple delimited file to an XES log. Intended mainly for concise,
+ * expressive test data. Expected syntax for input files is described in documentation for
+ * {@link #convertText(String, String, String)}.
+ *
+ * @author burkeat
+ *
+ */
+public class DelimitedTraceToXESConverter {
+
+ public static final String DEFAULT_EVENT_DELIMITER = " ";
+ public static final String DEFAULT_TRACE_DELIMITER = "\n";
+
+ private static final String XES_CONCEPT_NAME = "concept:name";
+
+ public DelimitedTraceToXESConverter() {
+ }
+
+ /**
+ * Convert a space delimited event log with one trace on each line
+ * using {@link #convertText(String, String, String)}
+ *
+ * @param traces
+ * @return
+ */
+ public XLog convertText(String traces) {
+ return convertText(traces,DEFAULT_EVENT_DELIMITER,DEFAULT_TRACE_DELIMITER);
+ }
+
+ /**
+ * Convert a trace sequence with one trace in each vararg. Each trace is a
+ * sequence of space-delimited events per {@link #convertText(String, String, String)}.
+ *
+ * @param traces
+ * @return
+ */
+ public XLog convertTextArgs(String ... traces) {
+ XAttributeMap attrMap = new XAttributeMapImpl();
+ XLog result = new XLogImpl(attrMap);
+ for (String line: traces) {
+ if ("".equals(line))
+ break;
+ XTrace trace = convertSingleTrace(DEFAULT_EVENT_DELIMITER, line);
+ result.add(trace);
+ }
+ return result;
+ }
+
+ /**
+ * Convert a simple delimited file to an XES log. This is most useful for test data as it does
+ * not allow for the filtering of columns. It does allow multiple events per line, making
+ * for more concise and readable test data files.
+ *
+ * Files are of the form
+ *
+ * LOG :: TRACE {TRACE_DELIMITER TRACE}
+ * TRACE :: EVENT {EVENT_DELIMITER EVENT}
+ * EVENT ::
+ *
+ * @param traces
+ * @param eventDelimiter
+ * @param traceDelimiter
+ * @return
+ */
+ public XLog convertText(String traces, String eventDelimiter, String traceDelimiter) {
+ XAttributeMap attrMap = new XAttributeMapImpl();
+ XLog result = new XLogImpl(attrMap);
+ for (String line: traces.split(traceDelimiter)) {
+ if ("".equals(line))
+ break;
+ XTrace trace = convertSingleTrace(eventDelimiter, line);
+ result.add(trace);
+ }
+ return result;
+ }
+
+ private XTrace convertSingleTrace(String eventDelimiter, String line) {
+ XAttributeMap traceAttrMap = new XAttributeMapImpl();
+ XTrace trace = new XTraceImpl(traceAttrMap);
+ for (String eventLabel: line.split(eventDelimiter)) {
+ if ("".equals(eventLabel))
+ break;
+ XAttributeMap eventAttrMap = new XAttributeMapImpl();
+ XAttribute attr = new XAttributeLiteralImpl(XES_CONCEPT_NAME,eventLabel);
+ eventAttrMap.put(XES_CONCEPT_NAME, attr);
+ XEvent event = new XEventImpl(eventAttrMap);
+ trace.add(event);
+ }
+ return trace;
+ }
+
+ /**
+ *
+ *
+ * @param filePath
+ * @return
+ */
+ public XLog convertFile(String filePath)
+ throws IOException
+ {
+ return convertFile(filePath,DEFAULT_EVENT_DELIMITER,DEFAULT_TRACE_DELIMITER);
+ }
+
+ /**
+ * Small files only. UTF-8 encoding assumed.
+ *
+ * @param filePath
+ * @param eventDelimiter
+ * @param traceDelimiter
+ * @return
+ */
+ public XLog convertFile(String filePath, String eventDelimiter, String traceDelimiter)
+ throws IOException
+ {
+ byte[] encoded = Files.readAllBytes( Paths.get( filePath ));
+ return convertText( new String(encoded,StandardCharsets.UTF_8) );
+ }
+
+
+
+}
diff --git a/src/au/edu/qut/xes/helpers/TargetFunction.java b/src/au/edu/qut/xes/helpers/TargetFunction.java
new file mode 100644
index 0000000..285447d
--- /dev/null
+++ b/src/au/edu/qut/xes/helpers/TargetFunction.java
@@ -0,0 +1,502 @@
+package au.edu.qut.xes.helpers;
+
+import cern.jet.random.engine.MersenneTwister;
+import de.congrace.exp4j.Calculable;
+
+import org.apache.commons.math4.legacy.analysis.MultivariateFunction;
+import org.apache.commons.math4.legacy.optim.InitialGuess;
+import org.apache.commons.math4.legacy.optim.MaxEval;
+import org.apache.commons.math4.legacy.optim.SimpleBounds;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.ObjectiveFunction;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.Sigma;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.CMAESOptimizer;
+import org.apache.commons.rng.UniformRandomProvider;
+import org.apache.commons.rng.simple.RandomSource;
+import org.apache.commons.math3.analysis.UnivariateFunction;
+
+import java.util.*;
+
+
+public class TargetFunction {
+
+    /**
+     * Unit earth-movers' stochastic conformance (uEMSC) style objective for a
+     * fixed model: each probX is the symbolic probability of one observed
+     * trace in terms of transition weights t7, t8, t11, t12, and the returned
+     * value is 1 minus the summed positive shortfalls against the observed
+     * trace frequencies. Higher is better (maximize).
+     *
+     * @param x candidate weights, in the order t12, t11, t8, t7
+     * @return objective value in (-inf, 1]
+     */
+    public static double getUEMSC(double[] x){
+
+        String prob1 = "(t11*t12*t8)/(t11*t12*t7+t11*t7*t7+t11*t12*t8+t11*t7*t8+t12*t7*t8+t7*t7*t8+t12*t8*t8+t7*t8*t8)";
+        String prob2 = "(t11*t12*t8)/(t11*t11*t12+t11*t12*t12+t11*t11*t7+t11*t12*t7+t11*t12*t8+t12*t12*t8+t11*t7*t8+t12*t7*t8)";
+        String prob3 = "(t12*t8)/(t11*t11+t11*t12+t11*t8+t12*t8)";
+        String prob4 = "(t11*t7*t8)/(t11*t11*t12+t11*t12*t12+t11*t11*t7+t11*t12*t7+t11*t12*t8+t12*t12*t8+t11*t7*t8+t12*t7*t8)";
+        String prob5 = "(t11*t7)/(t11*t7+t11*t8+t7*t8+t8*t8)";
+        String prob6 = "(t11*t7*t8)/(t11*t12*t7+t11*t7*t7+t11*t12*t8+t11*t7*t8+t12*t7*t8+t7*t7*t8+t12*t8*t8+t7*t8*t8)";
+
+        HashMap<String, Double> strToDouble = new HashMap<>();
+        strToDouble.put("t12", x[0]);
+        strToDouble.put("t11", x[1]);
+        strToDouble.put("t8", x[2]);
+        strToDouble.put("t7", x[3]);
+        // Debug dump of the candidate weights; noisy when the optimizer
+        // evaluates this objective many times -- consider removing.
+        for(String s:strToDouble.keySet()) {
+            System.out.println(s+" "+strToDouble.get(s));
+
+        }
+
+        // Constants are the observed relative trace frequencies; only
+        // probability mass *below* the observation is penalized.
+        return 1 - Math.max(0.00625 - converStringToMathExp(prob1, strToDouble), 0)
+                - Math.max(0.0375 - converStringToMathExp(prob2, strToDouble), 0)
+                - Math.max(0.375 - converStringToMathExp(prob3, strToDouble), 0)
+                - Math.max(0.3375- converStringToMathExp(prob4, strToDouble), 0)
+                - Math.max(0.1875 - converStringToMathExp(prob5, strToDouble), 0)
+                - Math.max(0.05625 - converStringToMathExp(prob6, strToDouble), 0);
+    }
+
+// public static double getEntropy(double[] x){
+// return -0.3 * (Math.log(x[0]/(x[0]+x[1]+x[4])) / Math.log(2))
+// -0.3 * (Math.log(x[1] * x[2]/((x[0]+x[1]+x[4])*(x[2]+x[3]))) / Math.log(2))
+// -0.3 * (Math.log(x[1] * x[3]/((x[0]+x[1]+x[4])*(x[2]+x[3]))) / Math.log(2))
+// -0.1 * (Math.log(x[4] /(x[0]+x[1]+x[4])) / Math.log(2));
+// }
+//
+// public static double getJS(double[] x){
+
+// double p1 = 0.3;
+// double p2 = 0.3;
+// double p3 = 0.3;
+// double p4 = 0.1;
+// double q1 = x[0]/(x[0]+x[1]+x[4]);
+// double q2 = (x[1] * x[2])/((x[0]+x[1]+x[4])*(x[2]+x[3]));
+// double q3 = (x[1] * x[3])/((x[0]+x[1]+x[4])*(x[2]+x[3]));
+// double q4 = x[4]/(x[0]+x[1]+x[4]);
+// double m1 = 0.5*(p1+q1);
+// double m2 = 0.5*(p2+q2);
+// double m3 = 0.5*(p3+q3);
+// double m4 = 0.5*(p4+q4);
+//
+//
+// return
+// 0.3 * Math.log(0.3/m1) + q1 * Math.log(q1 /m1)+
+// 0.3 * Math.log(0.3/m2) + q2 * Math.log(q2 /m2)+
+// 0.3 * Math.log(0.3/m3) + q3 * Math.log(q3 /m3)+
+// 0.1 * Math.log(0.1/m4) + q4 * Math.log(q4 /m4);
+//
+// }
+
+
+ /**
+  * Exploratory driver: maximizes the uEMSC objective over the four
+  * transition weights with CMA-ES and prints the best point found.
+  * Earlier BOBYQA / entropy / Jensen-Shannon experiments are retained
+  * below as commented-out code.
+  */
+ public static void main(String[] args) {
+
+// numerator += x[0];
+// denominator += x[0];
+
+ // Adapt the static objective to the commons-math MultivariateFunction API.
+ MultivariateFunction fUEMSC = new MultivariateFunction() {
+ public double value(double[] x) {
+ return getUEMSC(x);
+ }
+ };
+//
+// MultivariateFunction fEntropy = new MultivariateFunction() {
+// public double value(double[] x) {
+// return getEntropy(x);
+// }
+// };
+
+// MultivariateFunction fJS = new MultivariateFunction() {
+// public double value(double[] x) {
+// return getJS(x);
+// }
+// };
+//
+// double[] lowerBound = new double[]{0, 0, 0, 0};
+// double[] upperBound = new double[]{1, 1, 1, 1};
+//
+// System.out.println("\nuEMSC");
+// BOBYQAOptimizer optim1 = new BOBYQAOptimizer(10);
+// PointValuePair result2 = optim1.optimize(
+// new MaxEval(100000),
+// new ObjectiveFunction(fUEMSC),
+// GoalType.MAXIMIZE,
+// new SimpleBounds(lowerBound, upperBound),
+// new InitialGuess(new double[] {0.1,0.1,0.1,0.1})
+// );
+// System.out.println(result2.getPoint()[0]);
+// System.out.println(result2.getPoint()[1]);
+// System.out.println(result2.getPoint()[2]);
+// System.out.println(result2.getPoint()[3]);
+
+
+ // Search box: weights in [0.0001, 1], starting from the lower corner with
+ // a uniform CMA-ES step size (sigma) of 0.1 per coordinate.
+ int modelTransitionNum = 4;
+ double[] lowerBound = new double[modelTransitionNum];
+ double[] upperBound = new double[modelTransitionNum];
+ double[] initGuess = new double[modelTransitionNum];
+ double[] sigma = new double[modelTransitionNum];
+ Arrays.fill(lowerBound, 0.0001);
+ Arrays.fill(upperBound, 1.0000);
+ Arrays.fill(initGuess, 0.0001);
+ Arrays.fill(sigma, 0.1);
+ UniformRandomProvider rngG = RandomSource.MT_64.create();
+ // Args: maxIterations, stopFitness, isActiveCMA, diagonalOnly,
+ // checkFeasibleCount, rng, generateStatistics, convergence checker.
+ CMAESOptimizer optimizer = new CMAESOptimizer(
+ 1000000,
+ 0,
+ true,
+ modelTransitionNum,
+ 100,
+ rngG,
+ true,
+ null);
+
+ // Population size follows the standard CMA-ES heuristic 4 + 3*ln(n).
+ double[] result2 = optimizer.optimize(
+ new MaxEval(1000000),
+ new ObjectiveFunction(fUEMSC),
+ GoalType.MAXIMIZE,
+ new PopulationSize((int) (4+3*Math.log(modelTransitionNum))),
+ new Sigma(sigma),
+ new InitialGuess(initGuess),
+ new SimpleBounds(lowerBound, upperBound)).getPoint();
+
+ // Best point found, in the order t12, t11, t8, t7 (see getUEMSC).
+ System.out.println(result2[0]);
+ System.out.println(result2[1]);
+ System.out.println(result2[2]);
+ System.out.println(result2[3]);
+// BOBYQAOptimizer optim2 = new BOBYQAOptimizer(11);
+// PointValuePair result2 = optim2.optimize(
+// new MaxEval(100000),
+// new ObjectiveFunction(fEntropy),
+// GoalType.MINIMIZE,
+// new SimpleBounds(lowerBound, upperBound),
+// new InitialGuess(new double[] {1,1,1,1})
+// );
+// System.out.println(result2.getPoint()[0]);
+// System.out.println(result2.getPoint()[1]);
+// System.out.println(result2.getPoint()[2]);
+// System.out.println(result2.getPoint()[3]);
+//
+// System.out.println("\njs");
+// BOBYQAOptimizer optim3 = new BOBYQAOptimizer(9);
+// PointValuePair result3 = optim3.optimize(
+// new MaxEval(100000),
+// new ObjectiveFunction(fJS),
+// GoalType.MINIMIZE,
+// new SimpleBounds(lowerBound, upperBound),
+// new InitialGuess(new double[] {1,1,1,1})
+// );
+// System.out.println(result3.getPoint()[0]);
+// System.out.println(result3.getPoint()[1]);
+// System.out.println(result3.getPoint()[2]);
+// System.out.println(result3.getPoint()[3]);
+
+
+// int maxIterations = 200000;
+// double stopFitness = 0; //Double.NEGATIVE_INFINITY;
+// boolean isActiveCMA = true;
+// int diagonalOnly = 0;
+// int checkFeasableCount = 1;
+// RandomGenerator random = new Well19937c();
+// boolean generateStatistics = false;//
+// OptimizationData sigma = new CMAESOptimizer.Sigma(new double[] {
+// (upperBound[0] - lowerBound[0]),
+// (upperBound[0] - lowerBound[0]),
+// (upperBound[0] - lowerBound[0]),
+// (upperBound[0] - lowerBound[0]),
+// (upperBound[0] - lowerBound[0])});
+// OptimizationData popSize = new CMAESOptimizer.PopulationSize((int) (4 + Math.floor(3 * Math.log(2))));
+//
+// // construct solver
+// ConvergenceChecker checker = new SimpleValueChecker(1e-6, 1e-10);
+//
+// CMAESOptimizer opt = new CMAESOptimizer(maxIterations, stopFitness, isActiveCMA, diagonalOnly,
+// checkFeasableCount, random, generateStatistics, checker);
+// PointValuePair pair = opt.optimize(new InitialGuess(
+// new double[] {1,1,1,1,1}),
+// new ObjectiveFunction(fEntropy),
+// GoalType.MINIMIZE, new SimpleBounds(lowerBound, upperBound),
+// sigma, popSize,
+// new MaxIter(maxIterations), new MaxEval(maxIterations * 2));
+// System.out.println(pair.getPoint()[0]);
+// System.out.println(pair.getPoint()[1]);
+// System.out.println(pair.getPoint()[2]);
+// System.out.println(pair.getPoint()[3]);
+// System.out.println(pair.getPoint()[4]);
+
+ }
+
+    /**
+     * Evaluates an infix arithmetic expression after substituting the given
+     * variable bindings, via reverse Polish conversion.
+     * (Generic type parameters restored; the raw HashMap lost them.)
+     *
+     * @param calculateString infix expression over +, -, *, / and parentheses
+     * @param strToDouble variable name to value bindings
+     * @return the evaluated result
+     */
+    static public double converStringToMathExp(String calculateString, HashMap<String, Double> strToDouble) {
+        return calculateInversePolandExpression(getInversePolandExpression(calculateString),strToDouble);
+    }
+
+ /**
+  * Evaluates a purely numeric infix expression (no variables) via reverse
+  * Polish conversion using getInversePolandExpression2.
+  */
+ static public double converStringToMathExp(String calculateString) {
+ return calculateInversePolandExpression(getInversePolandExpression2(calculateString));
+ }
+
+    /**
+     * Substitutes each constant name in the expression with its textual value,
+     * then evaluates the resulting numeric expression.
+     * (Generic type parameters restored; the raw Map lost them.)
+     *
+     * NOTE(review): replaceAll treats the map keys as regular expressions and
+     * also rewrites prefixes of longer names (e.g. key "t1" matches inside
+     * "t11") -- confirm callers only pass regex-safe, prefix-free names.
+     *
+     * @param constantMap constant name to numeric-literal text
+     * @param calculateString infix expression containing the constant names
+     * @return the evaluated result
+     */
+    static public double converStringToMathExp(
+            Map<String, String> constantMap, String calculateString) {
+        double result = 0;
+        for (String str : constantMap.keySet()) {
+            calculateString = calculateString.replaceAll(str,
+                    constantMap.get(str));
+        }
+        result = calculateInversePolandExpression(getInversePolandExpression(calculateString));
+        return result;
+    }
+
+
+    /**
+     * Evaluates a token list already in reverse Polish (postfix) order.
+     * Operand tokens are parsed as doubles; +, -, * and / pop two operands
+     * and push the result. (Generic type parameters restored; the raw
+     * List/Stack lost them, and the redundant Double.valueOf round-trip on
+     * popped operands has been removed.)
+     *
+     * Note: an expression containing no operator returns 0, since result is
+     * only assigned when an operator is applied.
+     *
+     * @param inversePolandExpression postfix token sequence
+     * @return the evaluated result
+     */
+    static private double calculateInversePolandExpression(
+            List<String> inversePolandExpression) {
+        double result = 0;
+        Stack<Double> calculateStack = new Stack<>();
+        for (String str : inversePolandExpression) {
+            if (str.equals("+") || str.equals("-") || str.equals("*")
+                    || str.equals("/")) {
+                // Top of stack is the right-hand operand.
+                double t1 = calculateStack.pop();
+                double t2 = calculateStack.pop();
+                result = simpleCalculate(t2, t1, str);
+                calculateStack.push(result);
+            } else {
+                calculateStack.push(Double.valueOf(str));
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Evaluates a postfix token list where operand tokens may be variable
+     * names resolved through the bindings map; unresolved tokens are parsed
+     * as numeric literals. (Generic type parameters restored; the raw
+     * List/HashMap/Stack lost them.)
+     *
+     * Note: an expression containing no operator returns 0, since result is
+     * only assigned when an operator is applied.
+     *
+     * @param inversePolandExpression postfix token sequence
+     * @param strToDouble variable name to value bindings
+     * @return the evaluated result
+     */
+    static private double calculateInversePolandExpression(
+            List<String> inversePolandExpression,
+            HashMap<String, Double> strToDouble) {
+        double result = 0;
+        Stack<Double> calculateStack = new Stack<>();
+        for (String str : inversePolandExpression) {
+            if (str.equals("+") || str.equals("-") || str.equals("*")
+                    || str.equals("/")) {
+                // do the calculation for two variables.
+                double p1 = calculateStack.pop();
+                double p2 = calculateStack.pop();
+                result = simpleCalculate(p2,p1,str);
+                calculateStack.push(result);
+            } else {
+                // Variable token if bound, otherwise a numeric literal.
+                if(strToDouble.containsKey(str)){
+                    calculateStack.push(strToDouble.get(str));
+                }
+                else{
+                    calculateStack.push(Double.valueOf(str));
+                }
+            }
+        }
+
+        return result;
+    }
+
+ static private List getInversePolandExpression(
+ String normalExpression) {
+ List inversePolandExpression = new ArrayList();
+ char[] normalChararray = (normalExpression + "$").toCharArray();
+ //
+ Stack signStack = new Stack();
+ List> signStackList = new ArrayList