diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..58fb7ba Binary files /dev/null and b/.DS_Store differ diff --git a/.classpath b/.classpath new file mode 100644 index 0000000..2981dd3 --- /dev/null +++ b/.classpath @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/.github/workflows/ant.yml b/.github/workflows/ant.yml new file mode 100644 index 0000000..b376833 --- /dev/null +++ b/.github/workflows/ant.yml @@ -0,0 +1,46 @@ +# This workflow will build a Java project with Ant +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-ant + +name: Java CI + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + workflow_dispatch: + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Set up JDK 11 + uses: actions/setup-java@v3 + with: + java-version: '11' + distribution: 'temurin' + - name: Build with Ant + run: ant -noinput -buildfile build.xml buildHudson + env: + bytecode_format: ${{ vars.BYTECODE_FORMAT }} + major_minor: ${{ vars.MAJOR_MINOR }} + - name: Upload build result + uses: actions/upload-artifact@v3 + with: + name: release + path: latestrelease/ + - name: Commit files + run: | + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + git add latestrelease/ + git commit -m "Add changes" + - name: Push changes + uses: ad-m/github-push-action@master + with: + branch: ${{ github.ref }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..58c4b62 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +/bin/ +/tests/ +/.idea/ \ No newline at end of file diff --git a/.project b/.project new file mode 100644 index 0000000..d614cff --- /dev/null +++ b/.project @@ -0,0 +1,18 @@ + + + SLPNMiner + + + + + + org.eclipse.jdt.core.javabuilder + + + + + + org.eclipse.jdt.core.javanature + 
org.apache.ivyde.eclipse.ivynature + + diff --git a/NewPackageIvy.iml b/NewPackageIvy.iml new file mode 100644 index 0000000..2cd76c3 --- /dev/null +++ b/NewPackageIvy.iml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/ProM Package Manager (SLPNMiner).launch b/ProM Package Manager (SLPNMiner).launch new file mode 100644 index 0000000..1d3dfb4 --- /dev/null +++ b/ProM Package Manager (SLPNMiner).launch @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ProM with UITopia (SLPNMiner).launch b/ProM with UITopia (SLPNMiner).launch new file mode 100644 index 0000000..336424d --- /dev/null +++ b/ProM with UITopia (SLPNMiner).launch @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ProM.ini b/ProM.ini new file mode 100644 index 0000000..eec75c6 --- /dev/null +++ b/ProM.ini @@ -0,0 +1,47 @@ +# This file contains information about this ProM release +# it points ProM to the right packages and keeps version +# information +# +# Folders should be separated using "/" (forward slash). +# This will be replaced with File.Separator(). 
+# +# Specifies the ProM release version +PROM_VERSION = NightlyBuild +# +# Specifies which package should be installed +RELEASE_PACKAGE = RunnerUpPackages +# +# Specifies the URL to the default package repository +# (default is "http://prom.win.tue.nl/ProM/packages/packages.xml") +PACKAGE_URL = http://www.promtools.org/prom6/packages/packages.xml +# +# Specifies whether ProM is Verbose +# (possible: "ALL" / "ERROR" / "NONE", defaults to "ALL") +VERBOSE = ALL +# +# The library folder is relative to the prom installation +# folder (default is "lib") +LIB_FOLDER = lib +# +# The images folder is relative to the prom library +# folder (default is "=images") +IMAGES_FOLDER = images +# +# The macro folder is relative to the prom library +# folder (default is "macros") +MACRO_FOLDER = macros +# +# The prom user folder is NOT relative to the +# prom installation folder. The (default is empty, in +# which case the OS handles the location) +PROM_USER_FOLDER = +# +# The package folder is relative to the +# prom user folder. The (default is "packages") +PACKAGE_FOLDER = packages +# +# The workspace folder is relative to the +# prom user folder. The (default is "workspace") +WORKSPACE_FOLDER = workspace + + diff --git a/ReadMe.md b/ReadMe.md new file mode 100644 index 0000000..44c15f8 --- /dev/null +++ b/ReadMe.md @@ -0,0 +1,40 @@ +# SLPNMiner + +SLPNMiner is a ProM package for the discovery of Stochastic Labelled Petri net, which provides plugin-ins for stochastic process discovery. The input are an event log and a Petri net model, and the output is a stochastic labelled petri net. The two current implemented plugins adopt the techniques introduced in the following to assist weight estimation. + +* Entropic relevance: +[Hanan Alkhammash, Artem Polyvyanyy, Alistair Moffat, Luciano García-Bañuelos: Entropic relevance: A mechanism for measuring stochastic process models discovered from event data. Inf. Syst. 
107: 101922 (2022)](https://www.sciencedirect.com/science/article/pii/S0306437921001277) + +* Unit Earth Mover Stochastic Conformance: +[Sander J. J. Leemans, Wil M. P. van der Aalst, Tobias Brockhoff, Artem Polyvyanyy: Stochastic process mining: Earth movers' stochastic conformance. Inf. Syst. 102: 101724 (2021)](https://www.sciencedirect.com/science/article/pii/S0306437921001277) + +## Installation +* If you have not yet installed or run ProM6 before, follow the installation tutorial: https://promtools.org/prom-6-getting-started/installation/ + +* Although the majority of ProM developers use jdk 8, I have to use jdk 17 for this project, so that some third-party libraries (requires jdk 11+) can run. Therefore, **jdk 8 is not going to work** for this project*. The following VM argument should be added to make sure ProM GUI can launch with jdk 17: + ```python + -Djava.system.class.loader=org.processmining.framework.util.ProMClassLoader + ``` + + +## Usage +* After starting ProM plugin, import the event log and a Petri net model to the GUI. + + + +* Then, select the plugin-in *Discover SLPN with uEMSC* or *Discover SLPN with Enropic Relevance*. + + + +* The output is a SLPN, which shows the probability value for each transition. + + +## Contributing + +Pull requests are welcome. For major changes, please open an issue first +to discuss what you would like to change. + +Please make sure to update tests as appropriate. 
+ +## License +L-GPL (https://www.gnu.org/licenses/lgpl-3.0.en.html) \ No newline at end of file diff --git a/bpstruct-0.1.117.jar b/bpstruct-0.1.117.jar new file mode 100644 index 0000000..a83aefb Binary files /dev/null and b/bpstruct-0.1.117.jar differ diff --git a/bpstruct-osgi-1.1.jar b/bpstruct-osgi-1.1.jar new file mode 100644 index 0000000..4c5b345 Binary files /dev/null and b/bpstruct-osgi-1.1.jar differ diff --git a/build.xml b/build.xml new file mode 100644 index 0000000..a09f8a0 --- /dev/null +++ b/build.xml @@ -0,0 +1,216 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <packages> + <package name="${project}" version="${version}" os="${os}" url="${project}/${project}-${version}-${os}.zip" desc="${description}" org="${organization}" license="${license}" author="${author}" auto="false" hasPlugins="true" logo="${logo}"> + + + + + + </package> + </packages> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/eclipse-collections-9.2.0.jar b/eclipse-collections-9.2.0.jar new file mode 100644 index 0000000..694343e Binary files /dev/null and b/eclipse-collections-9.2.0.jar differ diff --git a/eclipse-collections-api-8.0.0.jar b/eclipse-collections-api-8.0.0.jar new file mode 100644 index 0000000..68b2bb0 Binary files /dev/null and b/eclipse-collections-api-8.0.0.jar differ diff --git a/img/VM_parameter.jpg b/img/VM_parameter.jpg new file mode 100644 index 0000000..422b65e Binary files /dev/null and b/img/VM_parameter.jpg differ diff --git a/img/step1.jpg b/img/step1.jpg new file mode 100644 index 0000000..db1a727 Binary files /dev/null and b/img/step1.jpg differ diff --git a/img/step2.jpg b/img/step2.jpg new file mode 100644 index 0000000..56fbefe Binary files 
/dev/null and b/img/step2.jpg differ diff --git a/img/step3.jpg b/img/step3.jpg new file mode 100644 index 0000000..5eaf745 Binary files /dev/null and b/img/step3.jpg differ diff --git a/ivy.xml b/ivy.xml new file mode 100644 index 0000000..653c9a5 --- /dev/null +++ b/ivy.xml @@ -0,0 +1,34 @@ + + + + Version VERSION + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ivysettings.xml b/ivysettings.xml new file mode 100644 index 0000000..ea56ee4 --- /dev/null +++ b/ivysettings.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/latestrelease/SLPNMiner-6.14.2-all.zip b/latestrelease/SLPNMiner-6.14.2-all.zip new file mode 100644 index 0000000..523d5a6 Binary files /dev/null and b/latestrelease/SLPNMiner-6.14.2-all.zip differ diff --git a/latestrelease/SLPNMiner.jar b/latestrelease/SLPNMiner.jar new file mode 100644 index 0000000..39c231a Binary files /dev/null and b/latestrelease/SLPNMiner.jar differ diff --git a/latestrelease/ivy.xml b/latestrelease/ivy.xml new file mode 100644 index 0000000..6e05a53 --- /dev/null +++ b/latestrelease/ivy.xml @@ -0,0 +1,27 @@ + + + + Version 6.14.2 + + + + + + + + + + + + + + + + + + + + + + diff --git a/latestrelease/packages.xml b/latestrelease/packages.xml new file mode 100644 index 0000000..c2c507a --- /dev/null +++ b/latestrelease/packages.xml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/.DS_Store b/src/.DS_Store new file mode 100644 index 0000000..16a3f2c Binary files /dev/null and b/src/.DS_Store differ diff --git a/src/au/.DS_Store b/src/au/.DS_Store new file mode 100644 index 0000000..46dd2c6 Binary files /dev/null and b/src/au/.DS_Store differ diff --git a/src/au/edu/.DS_Store b/src/au/edu/.DS_Store new file mode 100644 index 0000000..033180a Binary files /dev/null and b/src/au/edu/.DS_Store differ diff --git a/src/au/edu/qut/pm/spn_estimator/AbstractFrequencyEstimator.java b/src/au/edu/qut/pm/spn_estimator/AbstractFrequencyEstimator.java 
new file mode 100644 index 0000000..a0a0c70 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/AbstractFrequencyEstimator.java @@ -0,0 +1,109 @@ +package au.edu.qut.pm.spn_estimator; + +import java.util.HashMap; +import java.util.Map; + +import org.deckfour.xes.classification.XEventClass; +import org.deckfour.xes.classification.XEventClasses; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.framework.util.Pair; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +import au.edu.qut.pm.stochastic.StochasticNetCloner; + +public abstract class AbstractFrequencyEstimator implements LogSourcedWeightEstimator { + + protected Map,Double> followsFrequency = new HashMap<>(); + protected Map activityFrequency = new HashMap<>(); + protected Map startFrequency = new HashMap<>(); + protected Map endFrequency = new HashMap<>(); + protected Map activity2class = new HashMap(); + protected int traceCount = 0; + + + public void scanLog(XLog log, XEventClassifier classifier) { + XEventClasses classes = XEventClasses.deriveEventClasses(classifier, log); + for (int i=0; i(label,classes.getByIndex(j).getId()), + dfa[i][j]); + } + } + } + + @Override + public StochasticNet estimateWeights(AcceptingPetriNet pnet, XLog log, XEventClassifier classifier) { + scanLog(log,classifier); + StochasticNet snet = StochasticNetCloner.cloneFromPetriNet(pnet.getNet()); + estimateWeights(snet); + return snet; + } + + protected double loadFollowFrequency(Transition tran, Transition succTran) { + Pair key = new Pair<>(tran.getLabel(),succTran.getLabel()); + Double value = followsFrequency.get(key); + if (value == null) { + followsFrequency.put(key, 0.0d); + return 0.0; + } + return value; + } + + protected double 
loadActivityFrequency(Transition tran) { + Double value = activityFrequency.get(tran.getLabel()); + if (value == null) { + activityFrequency.put(tran.getLabel(), 1.0d); + return 1.0; + } + return value; + } + + protected double loadZeroableFrequency(Transition tran, Map freq) { + Double value = freq.get(tran.getLabel()); + if (value == null) { + freq.put(tran.getLabel(), 0.0d); + return 0.0; + } + return value; + } + + +} diff --git a/src/au/edu/qut/pm/spn_estimator/ActivityPairLHEstimator.java b/src/au/edu/qut/pm/spn_estimator/ActivityPairLHEstimator.java new file mode 100644 index 0000000..140f08c --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/ActivityPairLHEstimator.java @@ -0,0 +1,50 @@ +package au.edu.qut.pm.spn_estimator; + +import static au.edu.qut.prom.helpers.StochasticPetriNetUtils.findAllPredecessors; + +import java.util.Collection; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.slpnminer.models.StochasticNetImpl; + +public class ActivityPairLHEstimator extends AbstractFrequencyEstimator { + + @Override + public String getShortID() { + return "aplh"; + } + + @Override + public String getReadableID() { + return "Activity Pair Left-Handed Estimator"; + } + + @Override + public void estimateWeights(StochasticNet net) { + edgePairWeights(net); + } + + private void edgePairWeights(StochasticNet net ) { + for (Transition tran: net.getTransitions()) { + TimedTransition transition = (TimedTransition)tran; + Collection predecessors = findAllPredecessors(transition); + double predecessorWeight = 0; + for (Transition pred: predecessors) { + predecessorWeight += loadFollowFrequency(pred, tran); + } + double weight = predecessorWeight + + loadZeroableFrequency(tran, startFrequency) + + loadZeroableFrequency(tran, endFrequency); + 
transition.setWeight(weight > 0.0 ? weight: 1.0); + } + } + + @Override + public StochasticNetImpl getResult() { + // TODO Auto-generated method stub + return null; + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/ActivityPairLHWeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/ActivityPairLHWeightEstimator.java new file mode 100644 index 0000000..9e45875 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/ActivityPairLHWeightEstimator.java @@ -0,0 +1,52 @@ +package au.edu.qut.pm.spn_estimator; + +import static au.edu.qut.prom.helpers.StochasticPetriNetUtils.findAllPredecessors; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.processmining.logabstractions.models.ColumnAbstraction; +import org.processmining.logabstractions.models.MatrixAbstraction; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +public class ActivityPairLHWeightEstimator implements WeightEstimator { + + private MatrixAbstraction followsFrequency; + private ColumnAbstraction startFrequency; + private ColumnAbstraction endFrequency; + private Map transition2class = new HashMap(); + + public ActivityPairLHWeightEstimator(MatrixAbstraction followsFrequency, + ColumnAbstraction startFrequency, + ColumnAbstraction endFrequency, + Map transition2class) { + this.followsFrequency = followsFrequency; + this.startFrequency = startFrequency; + this.endFrequency = endFrequency; + this.transition2class = transition2class; + } + + @Override + public void estimateWeights(StochasticNet net) { + edgePairWeights(net); + } + + private void edgePairWeights(StochasticNet net ) { + for (Transition tran: net.getTransitions()) { + E tranEC = transition2class.get(tran); + TimedTransition transition = (TimedTransition)tran; + Collection predecessors = 
findAllPredecessors(transition); + double predecessorWeight = 0; + for (Transition pred: predecessors) { + predecessorWeight += followsFrequency.getValue( transition2class.get(pred),tranEC); + } + transition.setWeight(predecessorWeight + + startFrequency.getValue(tranEC) + + endFrequency.getValue(tranEC) ); + } + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/ActivityPairRHEstimator.java b/src/au/edu/qut/pm/spn_estimator/ActivityPairRHEstimator.java new file mode 100644 index 0000000..33a2a8b --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/ActivityPairRHEstimator.java @@ -0,0 +1,102 @@ +package au.edu.qut.pm.spn_estimator; + +import static au.edu.qut.prom.helpers.StochasticPetriNetUtils.findAllSuccessors; + +import java.util.Collection; +import java.util.Map; + +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.DistributionType; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.ExecutionPolicy; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.TimeUnit; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.slpnminer.models.StochasticNetImpl; + +import gnu.trove.map.TObjectIntMap; +import gnu.trove.map.hash.THashMap; +import gnu.trove.map.hash.TObjectIntHashMap; + +public class ActivityPairRHEstimator extends AbstractFrequencyEstimator { + + org.processmining.slpnminer.models.StochasticNetImpl result = new org.processmining.slpnminer.models.StochasticNetImpl("target net"); + + @Override + public String getShortID() { + return "aprh"; + } + 
+ @Override + public String getReadableID() { + return "Activity Pair Right-Handed Estimator"; + } + + @Override + public void estimateWeights(StochasticNet net) { + edgePairWeights(net); + } + + private void edgePairWeights(StochasticNet net ) { +// for (Transition tran: net.getTransitions()) { +// TimedTransition transition = (TimedTransition)tran; +// Collection successors = findAllSuccessors(transition); +// double successorWeight = 0; +// for (Transition succ: successors) { +// successorWeight += loadFollowFrequency(tran,succ); +// } +// double weight = successorWeight +// + loadZeroableFrequency(tran, startFrequency) +// + loadZeroableFrequency(tran, endFrequency); +// transition.setWeight(weight > 0.0 ? weight: 1.0); +// } + + TObjectIntMap transition2occurrence = new TObjectIntHashMap(10, 0.5f, 0); + result.setExecutionPolicy(ExecutionPolicy.RACE_ENABLING_MEMORY); + result.setTimeUnit(TimeUnit.HOURS); + Map input2result = new THashMap<>(); + for (Place inputPlace : net.getPlaces()) { + Place resultPlace = result.addPlace(inputPlace.getLabel()); + input2result.put(inputPlace, resultPlace); + } + for (Transition tran : net.getTransitions()) { +// get transition weight + TimedTransition transition = (TimedTransition)tran; + Collection successors = findAllSuccessors(transition); + double successorWeight = 0; + for (Transition succ: successors) { + successorWeight += loadFollowFrequency(tran,succ); + } + double weight = successorWeight + + loadZeroableFrequency(tran, startFrequency) + + loadZeroableFrequency(tran, endFrequency); + + transition.setWeight( weight ); + transition.setDistributionType(DistributionType.IMMEDIATE); + Transition resultTransition = result.addTimedTransition( + tran.getLabel(), + weight, DistributionType.UNIFORM, 0.0, 200.0); + resultTransition.setInvisible(tran.isInvisible()); + input2result.put(tran, resultTransition); + } + + for (PetrinetEdge edge : net.getEdges()) { + PetrinetNode resultSource = input2result.get(edge.getSource()); + 
PetrinetNode resultTarget = input2result.get(edge.getTarget()); + if (resultSource instanceof Place) { + result.addArc((Place) resultSource, (Transition) resultTarget); + } else { + result.addArc((Transition) resultSource, (Place) resultTarget); + } + } + } + + @Override + public StochasticNetImpl getResult() { + // TODO Auto-generated method stub + return result; + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/ActivityPairRHWeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/ActivityPairRHWeightEstimator.java new file mode 100644 index 0000000..d018717 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/ActivityPairRHWeightEstimator.java @@ -0,0 +1,53 @@ +package au.edu.qut.pm.spn_estimator; + +import static au.edu.qut.prom.helpers.StochasticPetriNetUtils.findAllSuccessors; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.processmining.logabstractions.models.ColumnAbstraction; +import org.processmining.logabstractions.models.MatrixAbstraction; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +public class ActivityPairRHWeightEstimator implements WeightEstimator{ + + private MatrixAbstraction followsFrequency; + private ColumnAbstraction startFrequency; + private ColumnAbstraction endFrequency; + private Map transition2class = new HashMap(); + + public ActivityPairRHWeightEstimator(MatrixAbstraction followsFrequency, + ColumnAbstraction startFrequency, + ColumnAbstraction endFrequency, + Map transition2class) { + this.followsFrequency = followsFrequency; + this.startFrequency = startFrequency; + this.endFrequency = endFrequency; + this.transition2class = transition2class; + } + + @Override + public void estimateWeights(StochasticNet net) { + edgePairWeights(net); + } + + private void edgePairWeights(StochasticNet 
net ) { + for (Transition tran: net.getTransitions()) { + E tranEC = transition2class.get(tran); + TimedTransition transition = (TimedTransition)tran; + Collection successors = findAllSuccessors(transition); + double successorWeight = 0; + for (Transition succ: successors) { + successorWeight += followsFrequency.getValue(tranEC, + transition2class.get(succ)); + } + transition.setWeight(successorWeight + + startFrequency.getValue(tranEC) + + endFrequency.getValue(tranEC) ); + } + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/AlignmentEstimator.java b/src/au/edu/qut/pm/spn_estimator/AlignmentEstimator.java new file mode 100644 index 0000000..095ad64 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/AlignmentEstimator.java @@ -0,0 +1,205 @@ +package au.edu.qut.pm.spn_estimator; + +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.deckfour.xes.classification.XEventClass; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.info.XLogInfo; +import org.deckfour.xes.info.XLogInfoFactory; +import org.deckfour.xes.model.XLog; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.DistributionType; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.ExecutionPolicy; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.TimeUnit; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import 
org.processmining.models.graphbased.directed.petrinet.impl.StochasticNetImpl; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.astar.petrinet.PetrinetReplayerWithILP; +import org.processmining.plugins.connectionfactories.logpetrinet.TransEvClassMapping; +import org.processmining.plugins.inductiveVisualMiner.alignment.AcceptingPetriNetAlignment; +import org.processmining.plugins.inductiveVisualMiner.alignment.IvMEventClasses; +import org.processmining.plugins.petrinet.replayer.PNLogReplayer; +import org.processmining.plugins.petrinet.replayer.algorithms.IPNReplayAlgorithm; +import org.processmining.plugins.petrinet.replayer.algorithms.costbasedcomplete.CostBasedCompleteParam; +import org.processmining.plugins.petrinet.replayresult.PNRepResult; +import org.processmining.plugins.petrinet.replayresult.StepTypes; +import org.processmining.plugins.replayer.replayresult.SyncReplayResult; + +import au.edu.qut.prom.helpers.StochasticPetriNetUtils; +import gnu.trove.map.TObjectIntMap; +import gnu.trove.map.hash.THashMap; +import gnu.trove.map.hash.TObjectIntHashMap; +import nl.tue.astar.AStarException; + +public class AlignmentEstimator implements LogSourcedWeightEstimator { + + org.processmining.slpnminer.models.StochasticNetImpl result = new org.processmining.slpnminer.models.StochasticNetImpl("target net"); + + private static Logger LOGGER = LogManager.getLogger(); + + @Override + public void estimateWeights(StochasticNet net) { + throw new RuntimeException( + "Indirect estimation not supported. 
Use estimateWeights(net,log,classifier)"); + } + + @Override + public String getShortID() { + return "align"; + } + + @Override + public String getReadableID() { + return "Alignment"; + } + + @Override + public StochasticNet estimateWeights(AcceptingPetriNet inputNet, XLog log, XEventClassifier classifier) { + XLogInfo xLogInfo = XLogInfoFactory.createLogInfo(log, classifier); + IvMEventClasses eventClasses = new IvMEventClasses(xLogInfo.getEventClasses()); + checkAndDefaultMarkings(inputNet); + AcceptingPetriNetAlignment.addAllLeavesAsPerformanceEventClasses(eventClasses, inputNet); + + TObjectIntMap transition2occurrence = new TObjectIntHashMap(10, 0.5f, 0); + + XEventClass dummy = new XEventClass("", 1); + TransEvClassMapping mapping = createTransitionEventClassMapping(inputNet, eventClasses, dummy); + + PNLogReplayer replayer = new PNLogReplayer(); + CostBasedCompleteParam replayParameters = new CostBasedCompleteParam(eventClasses.getClasses(), dummy, + inputNet.getNet().getTransitions(), 1, 1); + replayParameters.setInitialMarking(inputNet.getInitialMarking()); + replayParameters.setMaxNumOfStates(Integer.MAX_VALUE); + IPNReplayAlgorithm algorithm = new PetrinetReplayerWithILP(); + Marking[] finalMarkings = new Marking[inputNet.getFinalMarkings().size()]; + replayParameters.setFinalMarkings(inputNet.getFinalMarkings().toArray(finalMarkings)); + replayParameters.setCreateConn(false); + replayParameters.setGUIMode(false); + + PNRepResult replayResult = replayLog(inputNet, log, mapping, replayer, replayParameters, algorithm); + + if (replayResult == null) { + LOGGER.error("Couldn't calculate alignment for {}", inputNet); + throw new RuntimeException("Couldn't calculate alignment for input net"); + } + + for (SyncReplayResult aTrace : replayResult) { + for (@SuppressWarnings("unused") Integer traceIndex : aTrace.getTraceIndex()) { + Iterator itType = aTrace.getStepTypes().iterator(); + Iterator itNode = aTrace.getNodeInstance().iterator(); + while 
(itType.hasNext()) { + StepTypes type = itType.next(); + Object node = itNode.next(); + if (type == StepTypes.MREAL || type == StepTypes.LMGOOD + || type == StepTypes.MINVI) { + if (!(node instanceof Transition)){ + LOGGER.error("Node {} wasn't a transition",node); + throw new RuntimeException("Node wasn't a transition" + node.toString()); + } + transition2occurrence.adjustOrPutValue((Transition) node, 1, 1); + } + } + } + } + StochasticNet result = copyNet(inputNet, transition2occurrence); + return result; + } + + private void checkAndDefaultMarkings(AcceptingPetriNet inputNet) { + if (inputNet.getInitialMarking().isEmpty() ) { + LOGGER.info("Initial markings were empty - guessing"); + Marking initialMarking = StochasticPetriNetUtils.guessInitialMarking(inputNet.getNet()); + if (initialMarking.isEmpty()) { + LOGGER.error("Initial markings required for alignment calculation for - {}", + inputNet); + throw new RuntimeException("Initial markings not supplied for alignment calculation and couldn't guess"); + } + inputNet.setInitialMarking(initialMarking); + } + if (inputNet.getFinalMarkings().isEmpty() + || (null == inputNet.getFinalMarkings().iterator().next() ) ) + { + LOGGER.info("Final markings were empty - using guessed final places"); + Set guessedFinalMarkings = + StochasticPetriNetUtils.guessFinalMarkingsAsIfJustFinalPlaces(inputNet.getNet()); + if (guessedFinalMarkings.isEmpty()) { + LOGGER.error("Couldn't guess final markings"); + throw new RuntimeException("Final markings not supplied for alignment calculation and couldn't guess"); + } + inputNet.setFinalMarkings(guessedFinalMarkings); + } + } + + private PNRepResult replayLog(AcceptingPetriNet inputNet, XLog log, TransEvClassMapping mapping, + PNLogReplayer replayer, CostBasedCompleteParam replayParameters, IPNReplayAlgorithm algorithm) { + PNRepResult replayResult = null; + try { + replayResult = replayer.replayLog(null, inputNet.getNet(), log, mapping, algorithm, + replayParameters); + 
}catch(AStarException ase) { + LOGGER.error("Error during log replay",ase); + throw new RuntimeException(ase.getMessage()); + } + return replayResult; + } + + private TransEvClassMapping createTransitionEventClassMapping(AcceptingPetriNet inputNet, + IvMEventClasses eventClasses, XEventClass dummy) { + TransEvClassMapping mapping; + { + mapping = new TransEvClassMapping(eventClasses.getClassifier(), dummy); + for (Transition t : inputNet.getNet().getTransitions()) { + if (t.isInvisible()) { + mapping.put(t, dummy); + } else { + mapping.put(t, eventClasses.getByIdentity(t.getLabel())); + } + } + } + return mapping; + } + + private StochasticNet copyNet(AcceptingPetriNet inputNet, TObjectIntMap transition2occurrence) { + result.setExecutionPolicy(ExecutionPolicy.RACE_ENABLING_MEMORY); + result.setTimeUnit(TimeUnit.HOURS); + Map input2result = new THashMap<>(); + for (Place inputPlace : inputNet.getNet().getPlaces()) { + Place resultPlace = result.addPlace(inputPlace.getLabel()); + input2result.put(inputPlace, resultPlace); + } + + for (Transition inputTransition : inputNet.getNet().getTransitions()) { + Transition resultTransition = result.addTimedTransition(inputTransition.getLabel(), + transition2occurrence.get(inputTransition), DistributionType.UNIFORM, 0.0, 200.0); + + resultTransition.setInvisible(inputTransition.isInvisible()); + input2result.put(inputTransition, resultTransition); + } + + for (PetrinetEdge edge : inputNet.getNet().getEdges()) { + PetrinetNode resultSource = input2result.get(edge.getSource()); + PetrinetNode resultTarget = input2result.get(edge.getTarget()); + if (resultSource instanceof Place) { + result.addArc((Place) resultSource, (Transition) resultTarget); + } else { + result.addArc((Transition) resultSource, (Place) resultTarget); + } + } + return result; + } + + @Override + public org.processmining.slpnminer.models.StochasticNetImpl getResult() { + // TODO Auto-generated method stub + return result; + } + +} diff --git 
a/src/au/edu/qut/pm/spn_estimator/BillClintonEstimator.java b/src/au/edu/qut/pm/spn_estimator/BillClintonEstimator.java new file mode 100644 index 0000000..2d946f3 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/BillClintonEstimator.java @@ -0,0 +1,78 @@ +package au.edu.qut.pm.spn_estimator; + +import java.util.HashMap; +import java.util.Map; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +import au.edu.qut.prom.helpers.StochasticPetriNetUtils; + +/** + * "When he comes to a fork in the road, he takes the fork" -- Jesse Jackson on Bill Clinton + * + * Referred to as ForkDistributionEstimator in the accompanying paper. + * + * @author burkeat + * + */ +public class BillClintonEstimator extends AbstractFrequencyEstimator{ + + @Override + public String getShortID() { + return "bce"; + } + + @Override + public String getReadableID() { + return "Fork Distributed (Bill Clinton) Estimator"; + } + + @Override + public void estimateWeights(StochasticNet net) { + projectedFrequencyWeights(net); + } + + private void projectedFrequencyWeights(StochasticNet net ) { + Map placeWeights = new HashMap<>(); + for (Transition tran: net.getTransitions()) { + for (Place succPlace: StochasticPetriNetUtils.successors(tran)) { + double totalPairWeight = 0; + for (Transition succTran: StochasticPetriNetUtils.successors(succPlace)) { + totalPairWeight += loadFollowFrequency(tran, succTran); + } + if (placeWeights.containsKey(succPlace)){ + totalPairWeight += placeWeights.get(succPlace); + } + placeWeights.put(succPlace, totalPairWeight); + } + // Reset transition weights + TimedTransition transition = (TimedTransition)tran; + transition.setWeight(0); + } + for (Place place: net.getPlaces()) { + if 
(net.getGraph().getInEdges(place).isEmpty()) { + // Initialize start place + placeWeights.put(place, (double)traceCount); + } + if (placeWeights.get(place) == 0) { + placeWeights.put(place, 1.0); + } + double tranTotal = 0; + for (Transition tran: StochasticPetriNetUtils.successors(place)) { + tranTotal += loadActivityFrequency(tran); + } + for (Transition tran: StochasticPetriNetUtils.successors(place)) { + double freq = loadActivityFrequency(tran); + double placeBudget = placeWeights.get(place); + double weight = placeBudget * freq / tranTotal; + TimedTransition transition = (TimedTransition)tran; + transition.setWeight( transition.getWeight() + weight); + } + } + } + + +} diff --git a/src/au/edu/qut/pm/spn_estimator/BillClintonWeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/BillClintonWeightEstimator.java new file mode 100644 index 0000000..33b8a18 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/BillClintonWeightEstimator.java @@ -0,0 +1,88 @@ +package au.edu.qut.pm.spn_estimator; + +import java.util.HashMap; +import java.util.Map; + +import org.processmining.logabstractions.models.ColumnAbstraction; +import org.processmining.logabstractions.models.MatrixAbstraction; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +import au.edu.qut.prom.helpers.StochasticPetriNetUtils; + +/** + * "When he comes to a fork in the road, he takes the fork" -- Jesse Jackson on Bill Clinton + * + * @author burkeat + * + * @param + */ +public class BillClintonWeightEstimator implements WeightEstimator{ + + private MatrixAbstraction followsFrequency; + private ColumnAbstraction activityFrequency; + private ColumnAbstraction startFrequency; + private Map transition2class = new HashMap(); + + public 
BillClintonWeightEstimator(MatrixAbstraction followsFrequency, + ColumnAbstraction activityFrequency, + ColumnAbstraction startFrequency, + Map transition2class) { + this.followsFrequency = followsFrequency; + this.activityFrequency = activityFrequency; + this.startFrequency = startFrequency; + this.transition2class = transition2class; + } + + @Override + public void estimateWeights(StochasticNet net) { + projectedFrequencyWeights(net); + } + + private void projectedFrequencyWeights(StochasticNet net ) { + Map placeWeights = new HashMap<>(); + for (Transition tran: net.getTransitions()) { + E tranEC = transition2class.get(tran); + for (Place succPlace: StochasticPetriNetUtils.successors(tran)) { + double totalPairWeight = 0; + for (Transition succTran: StochasticPetriNetUtils.successors(succPlace)) { + totalPairWeight += followsFrequency.getValue(tranEC, + transition2class.get(succTran)); + } + if (placeWeights.containsKey(succPlace)){ + totalPairWeight += placeWeights.get(succPlace); + } + placeWeights.put(succPlace, totalPairWeight); + } + // Reset transition weights + TimedTransition transition = (TimedTransition)tran; + transition.setWeight(0); + } + double traceCount = 0; // This would be slightly more efficient if passed in + for (int i=0; i transition2occurrence = new TObjectIntHashMap(10, 0.5f, 0); + result.setExecutionPolicy(ExecutionPolicy.RACE_ENABLING_MEMORY); + result.setTimeUnit(TimeUnit.HOURS); + Map input2result = new THashMap<>(); + for (Place inputPlace : net.getPlaces()) { + Place resultPlace = result.addPlace(inputPlace.getLabel()); + input2result.put(inputPlace, resultPlace); + } + for (Transition tran : net.getTransitions()) { +// get transition weight + Double freq = activityFrequency.get(tran.getLabel()); + if (freq == null){ + freq = 1.0; + } + TimedTransition transition = (TimedTransition)tran; + transition.setWeight( freq ); + transition.setDistributionType(DistributionType.IMMEDIATE); + Transition resultTransition = 
result.addTimedTransition( + tran.getLabel(), + freq, DistributionType.UNIFORM, 0.0, 200.0); + resultTransition.setInvisible(tran.isInvisible()); + input2result.put(tran, resultTransition); + } + + for (PetrinetEdge edge : net.getEdges()) { + PetrinetNode resultSource = input2result.get(edge.getSource()); + PetrinetNode resultTarget = input2result.get(edge.getTarget()); + if (resultSource instanceof Place) { + result.addArc((Place) resultSource, (Transition) resultTarget); + } else { + result.addArc((Transition) resultSource, (Place) resultTarget); + } + } + + } + + @Override + public String getShortID() { + return "fe"; + } + + @Override + public String getReadableID() { + return "Frequency Estimator"; + } + + public StochasticNetImpl getResult() { + return result; + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/FrequencyWeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/FrequencyWeightEstimator.java new file mode 100644 index 0000000..5613952 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/FrequencyWeightEstimator.java @@ -0,0 +1,27 @@ +package au.edu.qut.pm.spn_estimator; + +import java.util.Map; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.DistributionType; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +public class FrequencyWeightEstimator implements WeightEstimator { + + private Map activityFrequencies; + + public FrequencyWeightEstimator(Map activityFrequencies) { + this.activityFrequencies = activityFrequencies; + } + + @Override + public void estimateWeights(StochasticNet net) { + for (Transition tran: net.getTransitions()) { + TimedTransition transition = (TimedTransition)tran; + transition.setWeight( activityFrequencies.get(tran.getLabel()) ); + transition.setDistributionType(DistributionType.IMMEDIATE); + } 
+ } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/LogSourcedWeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/LogSourcedWeightEstimator.java new file mode 100644 index 0000000..ebaa0fd --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/LogSourcedWeightEstimator.java @@ -0,0 +1,19 @@ +package au.edu.qut.pm.spn_estimator; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.model.XLog; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.slpnminer.models.StochasticNetImpl; + +import au.edu.qut.pm.stochastic.ArtifactCreator; + +public interface LogSourcedWeightEstimator extends WeightEstimator, ArtifactCreator{ + + StochasticNetImpl result = new StochasticNetImpl("target net"); + + public StochasticNetImpl getResult(); + + public StochasticNet estimateWeights(AcceptingPetriNet net, XLog log, XEventClassifier classifier); + +} diff --git a/src/au/edu/qut/pm/spn_estimator/MeanScaledActivityPairRHEstimator.java b/src/au/edu/qut/pm/spn_estimator/MeanScaledActivityPairRHEstimator.java new file mode 100644 index 0000000..8b06bd1 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/MeanScaledActivityPairRHEstimator.java @@ -0,0 +1,61 @@ +package au.edu.qut.pm.spn_estimator; + +import static au.edu.qut.prom.helpers.StochasticPetriNetUtils.findAllSuccessors; + +import java.util.Collection; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.slpnminer.models.StochasticNetImpl; + +public class MeanScaledActivityPairRHEstimator extends AbstractFrequencyEstimator { + + @Override + public String getShortID() { + return "msaprh"; + } + + @Override + public String getReadableID() { + return "Mean Scaled 
Activity Pair Estimator"; + } + + @Override + public void estimateWeights(StochasticNet net) { + edgePairWeights(net); + } + + private void edgePairWeights(StochasticNet net) { + double frequencyTotal = 0; + for (Transition tran : net.getTransitions()) { + frequencyTotal += loadZeroableFrequency(tran,activityFrequency); + } + if (frequencyTotal == 0) { + frequencyTotal = 1.0; + } + double mean = frequencyTotal / net.getTransitions().size(); + for (Transition tran : net.getTransitions()) { + TimedTransition transition = (TimedTransition) tran; + Collection successors = findAllSuccessors(transition); + double successorWeight = 0; + for (Transition succ : successors) { + successorWeight += loadFollowFrequency(tran,succ); + } + double weight = (successorWeight + + loadZeroableFrequency(tran, startFrequency) + + loadZeroableFrequency(tran, endFrequency)) + / mean; + if (weight == 0) + weight = 1.0; + transition.setWeight(weight); + } + } + + @Override + public StochasticNetImpl getResult() { + // TODO Auto-generated method stub + return null; + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/MeanScaledActivityPairRHWeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/MeanScaledActivityPairRHWeightEstimator.java new file mode 100644 index 0000000..5bc8369 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/MeanScaledActivityPairRHWeightEstimator.java @@ -0,0 +1,60 @@ +package au.edu.qut.pm.spn_estimator; + +import static au.edu.qut.prom.helpers.StochasticPetriNetUtils.findAllSuccessors; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.processmining.logabstractions.models.ColumnAbstraction; +import org.processmining.logabstractions.models.MatrixAbstraction; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +public class 
MeanScaledActivityPairRHWeightEstimator implements WeightEstimator { + + private MatrixAbstraction followsFrequency; + private ColumnAbstraction activityFrequency; + private ColumnAbstraction startFrequency; + private ColumnAbstraction endFrequency; + private Map transition2class = new HashMap(); + + public MeanScaledActivityPairRHWeightEstimator(MatrixAbstraction followsFrequency, + ColumnAbstraction activityFrequency, + ColumnAbstraction startFrequency, ColumnAbstraction endFrequency, + Map transition2class) { + this.followsFrequency = followsFrequency; + this.activityFrequency = activityFrequency; + this.startFrequency = startFrequency; + this.endFrequency = endFrequency; + this.transition2class = transition2class; + } + + @Override + public void estimateWeights(StochasticNet net) { + edgePairWeights(net); + } + + private void edgePairWeights(StochasticNet net) { + double frequencyTotal = 0; + for (Transition tran : net.getTransitions()) { + frequencyTotal += activityFrequency.getValue(transition2class.get(tran)); + } + double mean = frequencyTotal / net.getTransitions().size(); + for (Transition tran : net.getTransitions()) { + E tranEC = transition2class.get(tran); + TimedTransition transition = (TimedTransition) tran; + Collection successors = findAllSuccessors(transition); + double successorWeight = 0; + for (Transition succ : successors) { + successorWeight += followsFrequency.getValue(tranEC, transition2class.get(succ)); + } + double weight = (successorWeight + + startFrequency.getValue(tranEC) + endFrequency.getValue(tranEC) ) + / mean; + transition.setWeight(weight); + } + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/NoopEstimator.java b/src/au/edu/qut/pm/spn_estimator/NoopEstimator.java new file mode 100644 index 0000000..7b6e219 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/NoopEstimator.java @@ -0,0 +1,21 @@ +package au.edu.qut.pm.spn_estimator; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; + +public class 
NoopEstimator extends AbstractFrequencyEstimator{ + + @Override + public void estimateWeights(StochasticNet net) { + } + + @Override + public String getShortID() { + return "noop"; + } + + @Override + public String getReadableID() { + return "No Operation"; + } + +} diff --git a/src/au/edu/qut/pm/spn_estimator/WeightEstimator.java b/src/au/edu/qut/pm/spn_estimator/WeightEstimator.java new file mode 100644 index 0000000..06aa9b5 --- /dev/null +++ b/src/au/edu/qut/pm/spn_estimator/WeightEstimator.java @@ -0,0 +1,9 @@ +package au.edu.qut.pm.spn_estimator; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; + +public interface WeightEstimator { + + public void estimateWeights(StochasticNet net); + +} diff --git a/src/au/edu/qut/pm/stochastic/ArtifactCreator.java b/src/au/edu/qut/pm/stochastic/ArtifactCreator.java new file mode 100644 index 0000000..0121880 --- /dev/null +++ b/src/au/edu/qut/pm/stochastic/ArtifactCreator.java @@ -0,0 +1,6 @@ +package au.edu.qut.pm.stochastic; + +public interface ArtifactCreator { + public String getShortID(); + public String getReadableID(); +} diff --git a/src/au/edu/qut/pm/stochastic/StochasticNetCloner.java b/src/au/edu/qut/pm/stochastic/StochasticNetCloner.java new file mode 100644 index 0000000..a424ce3 --- /dev/null +++ b/src/au/edu/qut/pm/stochastic/StochasticNetCloner.java @@ -0,0 +1,27 @@ +package au.edu.qut.pm.stochastic; + +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.petrinet.impl.AbstractResetInhibitorNet; +import org.processmining.models.graphbased.directed.petrinet.impl.StochasticNetImpl; + +public class StochasticNetCloner extends StochasticNetImpl{ + + public 
StochasticNetCloner(String label) { + super(label); + } + + public static StochasticNet cloneFromPetriNet(Petrinet other) { + StochasticNetCloner net = new StochasticNetCloner(other.getLabel()); + net.cloneFrom((AbstractResetInhibitorNet)other, true, true, true, true, true); + for (Transition tran: net.getTransitions()) { + if (tran instanceof TimedTransition) { + ((TimedTransition) tran).setDistributionType(DistributionType.IMMEDIATE); + } + } + return net; + } + +} \ No newline at end of file diff --git a/src/au/edu/qut/pm/stochastic/StochasticNetDescriptor.java b/src/au/edu/qut/pm/stochastic/StochasticNetDescriptor.java new file mode 100644 index 0000000..1d3a7ce --- /dev/null +++ b/src/au/edu/qut/pm/stochastic/StochasticNetDescriptor.java @@ -0,0 +1,45 @@ +package au.edu.qut.pm.stochastic; + +import java.util.HashSet; +import java.util.Set; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.semantics.petrinet.Marking; + +public class StochasticNetDescriptor { + + private String id; + private StochasticNet net; + private Marking initialMarking; + private Set finalMarkings; + + public StochasticNetDescriptor(String id, StochasticNet net, Marking initialMarking) { + this(id,net,initialMarking,new HashSet<>()); + } + + public StochasticNetDescriptor(String id, StochasticNet net, Marking initialMarking, + Set finalMarkings) { + super(); + this.id = id; + this.net = net; + this.initialMarking = initialMarking; + this.finalMarkings = finalMarkings; + } + + public String getId() { + return id; + } + + public StochasticNet getNet() { + return net; + } + + public Marking getInitialMarking() { + return initialMarking; + } + + public Set getFinalMarkings() { + return finalMarkings; + } + +} diff --git a/src/au/edu/qut/processmining/log/ComplexLog.java b/src/au/edu/qut/processmining/log/ComplexLog.java new file mode 100644 index 0000000..e6d8ca1 --- /dev/null +++ b/src/au/edu/qut/processmining/log/ComplexLog.java 
@@ -0,0 +1,145 @@ +package au.edu.qut.processmining.log; + +import org.deckfour.xes.model.XLog; + +import java.util.Map; + +public class ComplexLog extends SimpleLog { + + private double[] relativeConcurrencyMatrix; + private double[] relativeDFG; + private int[] concurrencyMatrix; + private int[] dfg; + private int[] exclusiveness; + private int[] activityObserved; + + private int[] potentialORs; + + public ComplexLog(Map traces, Map events, XLog xlog) { + super(traces, events, xlog); + } + + public void computePercentages() { + int totalActivities = getEvents().size(); + relativeConcurrencyMatrix = new double[concurrencyMatrix.length]; + relativeDFG = new double[dfg.length]; + + + for(int i = 0; i < totalActivities; i++) { + for (int j = 0; j < totalActivities; j++) { + relativeDFG[i*totalActivities + j] = (double)dfg[i*totalActivities + j]/activityObserved[i]; + relativeConcurrencyMatrix[i*totalActivities + j] = (double)concurrencyMatrix[i*totalActivities + j]/(activityObserved[i]+activityObserved[j]); + } + } + + } + + public double[] getRelativeConcurrencyMatrix(){ return relativeConcurrencyMatrix; } + public double[] getRelativeDFG(){ return relativeDFG; } + + public int[] getPotentialORs() { + return potentialORs; + } + public void setPotentialORs(int[] potentialORs) { + this.potentialORs = potentialORs; + } + + public int[] getActivityObserved() { + return activityObserved; + } + public void setActivityObserved(int[] activityObserved) { + this.activityObserved = activityObserved; + } + + public int[] getConcurrencyMatrix() { + return concurrencyMatrix; + } + public void setConcurrencyMatrix(int[] concurrencyMatrix) { + this.concurrencyMatrix = concurrencyMatrix; + } + + public int[] getExclusiveness() { + return exclusiveness; + } + public void setExclusiveness(int[] exclusiveness) { + this.exclusiveness = exclusiveness; + } + + public int[] getDFG() { + return dfg; + } + public void setDFG(int[] dfg) { + this.dfg = dfg; + } + + public void 
printExclusivenessMatrix() { + int totalActivities = getEvents().size(); + + System.out.print("DEBUG - printing exclusiveness matrix:"); + for(int i = 0; i < totalActivities; i++) { + System.out.print("\n( "); + for( int j=0; j < totalActivities; j++) { + System.out.print(exclusiveness[i*totalActivities + j] + " "); + } + System.out.print(")"); + } + System.out.println(); + } + + public void printConcurrencyMatrix() { + int totalActivities = getEvents().size(); + + System.out.print("DEBUG - printing concurrency matrix:"); + for(int i = 0; i < totalActivities; i++) { + System.out.print("\n( "); + for( int j=0; j < totalActivities; j++) { + System.out.print(concurrencyMatrix[i*totalActivities + j] + " "); + } + System.out.print(")"); + } + System.out.println(); + } + + public void printRelativeConcurrencyMatrix() { + int totalActivities = getEvents().size(); + + System.out.print("DEBUG - printing relative concurrency matrix:"); + for(int i = 0; i < totalActivities; i++) { + System.out.print("\n( "); + for( int j=0; j < totalActivities; j++) { + System.out.print(relativeConcurrencyMatrix[i*totalActivities + j] + " "); + } + System.out.print(")"); + } + System.out.println(); + } + + public void printDFG() { + int totalActivities = getEvents().size(); + + System.out.print("DEBUG - printing DFG matrix:"); + for(int i = 0; i < totalActivities; i++) { + System.out.print("\n( "); + for( int j=0; j < totalActivities; j++) { + System.out.print(dfg[i*totalActivities + j] + " "); + } + System.out.print(")"); + } + System.out.println(); + } + + public void printRelativeDFG() { + int totalActivities = getEvents().size(); + + System.out.print("DEBUG - printing relative DFG matrix:"); + for(int i = 0; i < totalActivities; i++) { + System.out.print("\n( "); + for( int j=0; j < totalActivities; j++) { + System.out.print(relativeDFG[i*totalActivities + j] + " "); + } + System.out.print(")"); + } + System.out.println(); + } + +} diff --git 
a/src/au/edu/qut/processmining/log/LogParser.java b/src/au/edu/qut/processmining/log/LogParser.java new file mode 100644 index 0000000..50f4731 --- /dev/null +++ b/src/au/edu/qut/processmining/log/LogParser.java @@ -0,0 +1,538 @@ +/* + * Copyright © 2009-2018 The Apromore Initiative. + * + * This file is part of "Apromore". + * + * "Apromore" is free software; you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 3 of the + * License, or (at your option) any later version. + * + * "Apromore" is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty + * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + * See the GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this program. + * If not, see . + */ + +package au.edu.qut.processmining.log; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; + +import java.io.BufferedReader; +import java.io.FileReader; +import java.io.IOException; +import java.util.*; + +/** + * Created by Adriano on 14/06/2016. + */ +public class LogParser { + +// !!WARNING - DO NOT CHANGE THESE VALUES! 
+ public static final int STARTCODE = 0; + public static final int ENDCODE = -1; + + + public static SimpleLog getSimpleLog(String path) { + SimpleLog log; + + HashSet labels = new HashSet<>(); + ArrayList orderedLabels; + HashMap labelsToIDs = new HashMap<>(); //this maps the original name of an event to its code + HashMap events = new HashMap<>(); //this maps the code of the event to its original name + HashMap reverseMap = new HashMap<>(); //this maps the event name to its code + HashMap traces = new HashMap<>(); //this is the simple log, each trace is a string associated to its frequency + + int frequency; + String trace; + String strace; + String event; + StringTokenizer tokenizer; + + int LID; + + BufferedReader reader; + + events.put(STARTCODE, "autogen-start"); + events.put(ENDCODE, "autogen-end"); + + try { + reader = new BufferedReader(new FileReader(path)); + + while( reader.ready() ) + { + trace = reader.readLine(); + tokenizer = new StringTokenizer(trace, "::"); + tokenizer.nextToken(); + + while( tokenizer.hasMoreTokens() ) { + event = tokenizer.nextToken(); + labels.add(event); + } + } + + reader.close(); + + orderedLabels = new ArrayList<>(labels); + Collections.sort(orderedLabels); + + LID = 1; + for (String l : orderedLabels) { + labelsToIDs.put(l, LID); + events.put(LID, l); + reverseMap.put(l, LID); + LID++; + } + + reader = new BufferedReader(new FileReader(path)); + + while( reader.ready() ) + { + trace = reader.readLine(); + tokenizer = new StringTokenizer(trace, "::"); + frequency = Integer.valueOf(tokenizer.nextToken()); + + strace = "::" + STARTCODE + "::"; + while( tokenizer.hasMoreTokens() ) { + event = tokenizer.nextToken(); + strace += (labelsToIDs.get(event) + "::"); + } + strace += ENDCODE + "::"; + + if(!traces.containsKey(strace)) traces.put(strace, frequency); + else traces.put(strace, traces.get(strace) + frequency); + } + + reader.close(); + + log = new SimpleLog(traces, events, null); + log.setReverseMap(reverseMap); + 
log.setStartcode(STARTCODE); + log.setEndcode(ENDCODE); + + } catch ( IOException ioe ) { + System.out.println("ERROR - something went wrong while reading the log file: " + path); + return null; + } + + return log; + + } + + public static SimpleLog getSimpleLog(XLog log, XEventClassifier xEventClassifier) { +// System.out.println("LOGP - starting ... "); +// System.out.println("LOGP - input log size: " + log.size()); + + SimpleLog sLog; + + HashSet labels = new HashSet<>(); + ArrayList orderedLabels; + HashMap labelsToIDs = new HashMap<>(); //this maps the original name of an event to its code + HashMap events = new HashMap<>(); //this maps the code of the event to its original name + HashMap reverseMap = new HashMap<>(); //this maps the event name to its code + HashMap traces = new HashMap<>(); //this is the simple log, each trace is a string associated to its frequency + + int tIndex; //index to iterate on the log traces + int eIndex; //index to iterate on the events of the trace + + XTrace trace; + String sTrace; + + XEvent event; + String label; + + int LID; + long totalEvents; + long oldTotalEvents; + + long traceLength; + long longestTrace = Integer.MIN_VALUE; + long shortestTrace = Integer.MAX_VALUE; + + int totalTraces = log.size(); + long traceSize; + + int[] exclusiveness; + Set executed = new HashSet<>(); + + events.put(STARTCODE, "autogen-start"); + events.put(ENDCODE, "autogen-end"); + + + for( tIndex = 0; tIndex < totalTraces; tIndex++ ) { + /* we firstly get all the concept names + * and we map them into numbers for fast processing + */ + + trace = log.get(tIndex); + traceSize = trace.size(); + + for( eIndex = 0; eIndex < traceSize; eIndex++ ) { + event = trace.get(eIndex); + label = xEventClassifier.getClassIdentity(event); + labels.add(label); + } + } + + orderedLabels = new ArrayList<>(labels); + Collections.sort(orderedLabels); + + LID = 1; + for( String l : orderedLabels ) { + labelsToIDs.put(l, LID); + events.put(LID, l); + reverseMap.put(l, 
LID); +// System.out.println("DEBUG - ID:label - " + LID + ":" + l); + LID++; + } + + exclusiveness = new int[LID*LID]; + for(int i = 0; i traceLength ) shortestTrace = traceLength; + + if( !traces.containsKey(sTrace) ) traces.put(sTrace, 0); + traces.put(sTrace, traces.get(sTrace)+1); + + for(int a=0; a < LID; a++) { + if(!executed.contains(a)) { + for(int x : executed) { + exclusiveness[x*LID + a]++; + exclusiveness[a*LID + x]++; + } + } + } + } + +// System.out.println("LOGP - total events parsed: " + totalEvents); +// System.out.println("LOGP - total distinct events: " + (events.size() - 2) ); +// System.out.println("LOGP - total distinct traces: " + traces.size() ); + +// for( String t : traces.keySet() ) System.out.println("DEBUG - ["+ traces.get(t) +"] trace: " + t); + +// System.out.println("DEBUG - final mapping:"); +// for( int code : events.keySet() ) System.out.println("DEBUG - " + code + " = " + events.get(code)); + + sLog = new SimpleLog(traces, events, log); + sLog.setExclusiveness(exclusiveness); + sLog.setReverseMap(reverseMap); + sLog.setStartcode(STARTCODE); + sLog.setEndcode(ENDCODE); + sLog.setTotalEvents(totalEvents); + sLog.setShortestTrace(shortestTrace); + sLog.setLongestTrace(longestTrace); + + return sLog; + } + + public SimpleLog getSimpleLog(XLog log, XEventClassifier xEventClassifier, double percentage) { + SimpleLog sLog = getSimpleLog(log, xEventClassifier); + Map traces = sLog.getTraces(); + + TracesComparator tracesComparator = new TracesComparator(traces); + TreeMap sortedTraces = new TreeMap(tracesComparator); + sortedTraces.putAll(traces); + + int maxTraces = (int) (sLog.size() * percentage); + int parsed = 0; + int leastFrequent = 0; + + for( String trace : sortedTraces.keySet() ) { + if( parsed < maxTraces ) { +// System.out.println("DEBUG - trace, frequency: " + trace + "," + traces.get(trace) ); + parsed += traces.get(trace); + leastFrequent = traces.get(trace); + } else sLog.getTraces().remove(trace); + } + +// 
System.out.println("DEBUG - log size: " + sLog.size()); + System.out.println("INFO - log parsed at " + percentage*100 + "%"); +// System.out.println("DEBUG - to parse: " + maxTraces); +// System.out.println("DEBUG - parsed: " + parsed); +// System.out.println("DEBUG - min frequency: " + leastFrequent); + + sLog.setSize(parsed); + return sLog; + } + + public static SimpleLog getComplexLog(XLog log, XEventClassifier xEventClassifier) { +// System.out.println("LOGP - starting ... "); +// System.out.println("LOGP - input log size: " + log.size()); + + SimpleLog sLog; + + HashSet labels = new HashSet<>(); + ArrayList orderedLabels; + HashMap labelsToIDs = new HashMap<>(); //this maps the original name of an event to its code + HashMap events = new HashMap<>(); //this maps the code of the event to its original name + HashMap reverseMap = new HashMap<>(); //this maps the event name to its code + HashMap traces = new HashMap<>(); //this is the simple log, each trace is a string associated to its frequency + +//------------------------------- SPLIT MINER 2.0 ----------------------------- + int totalActivities; + +// parallelism keep tracks of the real concurrencies + int[] parallelism; + int[] potentialORs; + +// when real concurrencies are available, the directly-follow relations slightly differ from the simple case +// requiring to capture them already at this stage +// reminder: matrix[i][j] = array[i*size + j]; + int[] dfg; + int[] exclusiveness; + Set executed; + +// we need to keep track of all the activities that are still executing +// as well as the last activity that was completed + Set executing; + int lastComplete; + int endEvent; + + int[] activityObserved; +//----------------------------------------------------------------------------- + + int tIndex; //index to iterate on the log traces + int eIndex; //index to iterate on the events of the trace + + XTrace trace; + String sTrace; + + XEvent event; + String label; + + int LID; + long totalEvents; + long 
oldTotalEvents; + + long startEvents; + long completeEvents; + long totalConcurrencies; + + long traceLength; + long longestTrace = Integer.MIN_VALUE; + long shortestTrace = Integer.MAX_VALUE; + + int totalTraces = log.size(); + long traceSize; + + events.put(STARTCODE, "autogen-start"); +// NOTE: for complex logs (with activities life-cycle), we overwrite the standard ENDCODE later. +// events.put(ENDCODE, "autogen-end"); + + int count = 0; + for( tIndex = 0; tIndex < totalTraces; tIndex++ ) { + /* we firstly get all the concept names + * and we map them into numbers for fast processing + */ + + trace = log.get(tIndex); + traceSize = trace.size(); + + for( eIndex = 0; eIndex < traceSize; eIndex++ ) { + event = trace.get(eIndex); +// System.out.println("DEBUG " + count++ + "- lifecycle: " + event.getAttributes().get("lifecycle:transition")); + label = xEventClassifier.getClassIdentity(event); + labels.add(label); + } + } + + orderedLabels = new ArrayList<>(labels); + Collections.sort(orderedLabels); + + LID = 1; + for( String l : orderedLabels ) { + labelsToIDs.put(l, LID); + events.put(LID, l); + reverseMap.put(l, LID); +// System.out.println("DEBUG - ID:label - " + LID + ":" + l); + LID++; + } + +// this plus one accounts for the artificial end event + totalActivities = events.size()+1; + + potentialORs = new int[totalActivities*totalActivities]; + parallelism = new int[totalActivities*totalActivities]; + dfg = new int[totalActivities*totalActivities]; + activityObserved = new int[totalActivities]; + exclusiveness = new int[totalActivities*totalActivities]; +// this minus one is to ensure we do not go out bound on the array + endEvent = totalActivities-1; + events.put(endEvent, "autogen-end"); + +// reminder: matrix[i][j] = array[i*size + j]; + for(int i = 0; i < totalActivities; i++) { + activityObserved[i] = 0; + for (int j = 0; j < totalActivities; j++) { + dfg[i * totalActivities + j] = 0; + potentialORs[i * totalActivities + j] = 0; + parallelism[i * 
totalActivities + j] = 0; + exclusiveness[i * totalActivities + j] = 0; + } + } + + totalEvents = 0; + startEvents = 0; + completeEvents = 0; + totalConcurrencies = 0; + for( tIndex = 0; tIndex < totalTraces; tIndex++ ) { + /* we convert each trace in the log into a string + * each string will be a sequence of "::x" terminated with "::", where: + * '::' is a separator + * 'x' is an integer encoding the name of the original event + */ + trace = log.get(tIndex); + traceSize = trace.size(); + + oldTotalEvents = totalEvents; + + sTrace = "::" + Integer.toString(STARTCODE) + ":"; + lastComplete = STARTCODE; + executing = new HashSet<>(); + executed = new HashSet<>(); + executed.add(STARTCODE); + for( eIndex = 0; eIndex < traceSize; eIndex++ ) { + totalEvents++; + event = trace.get(eIndex); + label = xEventClassifier.getClassIdentity(event); + LID = labelsToIDs.get(label); + +// System.out.println("DEBUG " + count++ + "- lifecycle: " + event.getAttributes().get("lifecycle:transition")); + + if(event.getAttributes().get("lifecycle:transition").toString().equalsIgnoreCase("START")) { + startEvents++; + for(int e : executing) { + if( parallelism[e*totalActivities + LID] == 0 ) totalConcurrencies+=2; + parallelism[e*totalActivities + LID]++; + parallelism[LID*totalActivities + e]++; + } + executing.add(LID); +// dfg[lastComplete*totalActivities + LID]++; + executed.add(LID); + } + + if(event.getAttributes().get("lifecycle:transition").toString().equalsIgnoreCase("COMPLETE")) { + completeEvents++; + if( executing.contains(LID) ) executing.remove(LID); +// else dfg[lastComplete*totalActivities + LID]++; + dfg[lastComplete*totalActivities + LID]++; + lastComplete = LID; + activityObserved[LID]++; + sTrace += ":" + labelsToIDs.get(label).toString() + ":"; + executed.add(LID); + } + } + dfg[lastComplete*totalActivities + endEvent]++; + sTrace += ":" + endEvent + "::"; + executed.add(endEvent); + + for(int a=0; a < totalActivities; a++) { + if(!executed.contains(a)) { + for(int x 
/**
 * Orders trace strings by descending observed frequency, looking the
 * frequency of each trace up in the map supplied at construction time.
 *
 * NOTE: this comparator deliberately violates the usual Comparator contract
 * (it is not consistent with equals and never reports equality) so that it
 * can be used to sort the keys of a TreeMap without merging distinct traces
 * that happen to share the same frequency.
 */
class TracesComparator implements Comparator<String> {
    Map<String, Integer> base;

    public TracesComparator(Map<String, Integer> base) {
        this.base = base;
    }

    @Override
    public int compare(String a, String b) {
        // Never return 0: a result of 0 would collapse two distinct
        // trace keys into one inside a sorted map.
        return base.get(a) >= base.get(b) ? -1 : 1;
    }
}
/*
 * Copyright © 2009-2018 The Apromore Initiative. Part of "Apromore",
 * distributed under the GNU Lesser General Public License v3 (or later),
 * WITHOUT ANY WARRANTY. See <http://www.gnu.org/licenses/lgpl-3.0.html>.
 */

package au.edu.qut.processmining.log;

import org.deckfour.xes.model.XLog;

import java.util.HashMap;
import java.util.Map;

/**
 * Compact, string-encoded view of an event log.
 *
 * Each distinct trace is stored once as an encoded string mapped to its
 * frequency; each event code is mapped to its original label. The backing
 * {@link XLog} is retained for callers that need the raw log.
 *
 * Created by Adriano on 27/10/2016.
 */
public class SimpleLog {
    private XLog xlog;                          // original log this view was built from
    private Map<String, Integer> traces;        // encoded trace -> frequency
    private Map<Integer, String> events;        // event code -> label
    private Map<String, Integer> reverseMap;    // label -> event code
    private int size;                           // total traces (sum of frequencies)
    private long totalEvents;                   // -1 until set by the parser

    private long longestTrace;                  // -1 until set by the parser
    private long shortestTrace;                 // -1 until set by the parser

    private int startcode;                      // artificial trace-start event code
    private int endcode;                        // artificial trace-end event code

    // flattened activity-x-activity exclusiveness counters (row-major)
    private int[] exclusiveness;

    /**
     * Builds the view from the encoded traces and the event-code dictionary.
     * The log size is the sum of all trace frequencies; the remaining
     * statistics stay at -1 until the parser fills them in.
     */
    public SimpleLog(Map<String, Integer> traces, Map<Integer, String> events, XLog xlog) {
        this.traces = traces;
        this.events = events;
        this.xlog = xlog;

        totalEvents = -1;
        longestTrace = -1;
        shortestTrace = -1;

        size = 0;
        for (int frequency : traces.values()) size += frequency;
    }

    public XLog getXLog() { return xlog; }

    public Map<String, Integer> getTraces() { return traces; }
    public Map<Integer, String> getEvents() { return events; }

    public int size() { return size; }
    public void setSize(int size) { this.size = size; }

    public Map<String, Integer> getReverseMap() { return reverseMap; }
    public void setReverseMap(Map<String, Integer> reverseMap) { this.reverseMap = reverseMap; }

    public void setStartcode(int startcode) { this.startcode = startcode; }
    public int getStartcode() { return startcode; }

    public void setEndcode(int endcode) { this.endcode = endcode; }
    public int getEndcode() { return endcode; }

    public void setTotalEvents(long totalEvents) { this.totalEvents = totalEvents; }
    public long getTotalEvents() { return totalEvents; }

    public int getDistinctTraces() { return traces.size(); }

    // two entries of the dictionary are the artificial start/end events
    public int getDistinctEvents() { return (events.size() - 2); }

    public void setLongestTrace(long length) { longestTrace = length; }
    public long getLongestTrace() { return longestTrace; }

    public void setShortestTrace(long length) { shortestTrace = length; }
    public long getShortestTrace() { return shortestTrace; }

    public int[] getExclusiveness() { return exclusiveness; }
    public void setExclusiveness(int[] exclusiveness) { this.exclusiveness = exclusiveness; }

    // NOTE(review): integer division, and throws ArithmeticException when
    // size == 0 — confirm callers only invoke this on non-empty logs.
    public int getAvgTraceLength() { return (int) totalEvents / size; }
}
package au.edu.qut.processmining.log.graph;

import java.util.UUID;

/**
 * Directed, optionally labelled edge between two {@link LogNode}s of a
 * log-derived graph. Identity is carried by a random UUID assigned at
 * construction, so two edges between the same nodes are still distinct.
 *
 * Created by Adriano on 15/06/2016.
 */
public class LogEdge implements Comparable {
    protected String id;        // random UUID; the sole identity key
    protected String label;
    protected LogNode source;
    protected LogNode target;

    public LogEdge() {
        id = UUID.randomUUID().toString();
        source = null;
        target = null;
    }

    public LogEdge(LogNode source, LogNode target){
        id = UUID.randomUUID().toString();
        this.source = source;
        this.target = target;
    }

    public LogEdge(LogNode source, LogNode target, String label){
        id = UUID.randomUUID().toString();
        this.source = source;
        this.target = target;
        this.label = label;
    }

    public String getID() { return id; }

    public void setLabel(String label) { this.label = label; }
    public String getLabel() { return label; }

    public void setSource(LogNode source){ this.source = source; }
    public LogNode getSource(){ return source; }
    public int getSourceCode() { return source.getCode(); }

    public void setTarget(LogNode target) { this.target = target; }
    public LogNode getTarget(){ return target; }
    public int getTargetCode() { return target.getCode(); }

    // raw Comparable kept on purpose: callers compare against plain Object
    @Override
    public int compareTo(Object o) {
        if( o instanceof LogEdge) return id.compareTo(((LogEdge)o).getID());
        else return -1;
    }

    @Override
    public boolean equals(Object o) {
        if( o instanceof LogEdge) return id.equals(((LogEdge)o).getID());
        else return false;
    }

    /**
     * FIX: equals(Object) was overridden without hashCode(), violating the
     * Object contract — equal edges could be missed by HashSet/HashMap
     * lookups. Hash the same UUID string that equals() compares.
     */
    @Override
    public int hashCode() { return id.hashCode(); }
}
import java.util.UUID;

/**
 * Node of a log-derived graph: one event class (activity) together with the
 * frequencies observed for it in the log.
 *
 * Identity is carried by {@code id}: a random UUID when only a label is
 * given, or the decimal string of {@code code} when an event code is
 * supplied — so two nodes built with the same code compare equal.
 *
 * Created by Adriano on 15/06/2016.
 */
public class LogNode implements Comparable {
    protected String id;            // identity key used by equals/hashCode/compareTo
    protected String label;         // human-readable event-class name
    protected int code;             // numeric event code (0 when not supplied)

    protected int frequency;        // total observations in the log
    protected int startFrequency;   // observations as first event of a trace
    protected int endFrequency;     // observations as last event of a trace

    public LogNode() {
        id = UUID.randomUUID().toString();
        label = "null";
        frequency = 0;
        startFrequency = 0;
        endFrequency = 0;
    }

    public LogNode(String label) {
        id = UUID.randomUUID().toString();
        frequency = 0;
        startFrequency = 0;
        endFrequency = 0;
        this.label = label;
    }

    public LogNode(String label, int code) {
        // the event code doubles as the identity key
        id = Integer.toString(code);
        frequency = 0;
        startFrequency = 0;
        endFrequency = 0;
        this.label = label;
        this.code = code;
    }

    public String getID() { return id; }

    public void setLabel(String label) { this.label = label; }
    public String getLabel() { return label; }

//    public void setCode(int code) { this.code = code; }
    public int getCode() { return code; }

    public void increaseFrequency() { frequency++; }
    public void increaseFrequency(int amount) { frequency += amount; }

    public int getFrequency(){ return frequency; }

    public void incStartFrequency() { startFrequency++; }
    public void incEndFrequency() { endFrequency++; }

    public int getStartFrequency(){ return startFrequency;}
    public int getEndFrequency(){ return endFrequency;}

    public boolean isStartEvent() { return startFrequency != 0; }
    public boolean isEndEvent() { return endFrequency != 0; }

    /**
     * Orders nodes by their id string. NOTE(review): code-derived ids sort
     * lexicographically ("10" before "2") — confirm callers rely only on a
     * consistent total order, not a numeric one.
     */
    @Override
    public int compareTo(Object o) {
        if( o instanceof LogNode) return id.compareTo(((LogNode)o).getID());
        else return -1;
    }

    @Override
    public boolean equals(Object o) {
        if( o instanceof LogNode) return id.equals(((LogNode)o).getID());
        else return false;
    }

    /**
     * FIX: equals(Object) was overridden without hashCode(), violating the
     * Object contract — equal nodes could land in different HashMap/HashSet
     * buckets. Hash the same key that equals() compares.
     */
    @Override
    public int hashCode() { return id.hashCode(); }
}
(current % progressIndicatorSample == 0 )) { + LOGGER.debug(message + " -> (" + current + " / " + max + " )" ); + } + } +} \ No newline at end of file diff --git a/src/au/edu/qut/prom/helpers/ConsoleUIPluginContext.java b/src/au/edu/qut/prom/helpers/ConsoleUIPluginContext.java new file mode 100644 index 0000000..c3adaba --- /dev/null +++ b/src/au/edu/qut/prom/helpers/ConsoleUIPluginContext.java @@ -0,0 +1,344 @@ +package au.edu.qut.prom.helpers; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Comparator; +import java.util.HashSet; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.deckfour.xes.model.XLog; +import org.processmining.framework.connections.Connection; +import org.processmining.framework.connections.ConnectionCannotBeObtained; +import org.processmining.framework.connections.ConnectionManager; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.PluginContextID; +import org.processmining.framework.plugin.PluginDescriptor; +import org.processmining.framework.plugin.PluginExecutionResult; +import org.processmining.framework.plugin.PluginManager; +import org.processmining.framework.plugin.PluginParameterBinding; +import org.processmining.framework.plugin.ProMFuture; +import org.processmining.framework.plugin.Progress; +import org.processmining.framework.plugin.RecursiveCallException; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.events.Logger.MessageLevel; +import org.processmining.framework.plugin.events.ProgressEventListener.ListenerList; +import org.processmining.framework.plugin.impl.FieldSetException; +import 
package au.edu.qut.prom.helpers;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.deckfour.xes.model.XLog;
import org.processmining.framework.connections.Connection;
import org.processmining.framework.connections.ConnectionCannotBeObtained;
import org.processmining.framework.connections.ConnectionManager;
import org.processmining.framework.plugin.PluginContext;
import org.processmining.framework.plugin.PluginContextID;
import org.processmining.framework.plugin.PluginDescriptor;
import org.processmining.framework.plugin.PluginExecutionResult;
import org.processmining.framework.plugin.PluginManager;
import org.processmining.framework.plugin.PluginParameterBinding;
import org.processmining.framework.plugin.ProMFuture;
import org.processmining.framework.plugin.Progress;
import org.processmining.framework.plugin.RecursiveCallException;
import org.processmining.framework.plugin.annotations.Plugin;
import org.processmining.framework.plugin.events.Logger.MessageLevel;
import org.processmining.framework.plugin.events.ProgressEventListener.ListenerList;
import org.processmining.framework.plugin.impl.FieldSetException;
import org.processmining.framework.plugin.impl.PluginManagerImpl;
import org.processmining.framework.providedobjects.ProvidedObjectManager;
import org.processmining.framework.providedobjects.impl.ProvidedObjectManagerImpl;
import org.processmining.framework.util.Cast;
import org.processmining.framework.util.Pair;

/**
 * For running plugins in headless mode from test scaffold or command line,
 * even if they claim to require UIPluginContext and therefore be GUI only.
 * Adapted from Andreas Rogge-Solti's StochasticNetUtils.
 *
 * Most context services are stubbed to null/no-op; only progress reporting,
 * provided-object management, connections and logging are functional.
 */
public class ConsoleUIPluginContext implements PluginContext {

    private static Logger LOGGER = LogManager.getLogger();

    private Progress progress;
    private ProvidedObjectManager objectManager;
    private ConnectionManager connectionManager;

    public ConsoleUIPluginContext() {
        this.progress = new ConsoleProgress();
        this.objectManager = new ProvidedObjectManagerImpl();
        PluginManagerImpl.initialize(PluginContext.class);
        this.connectionManager = new HeadlessConnectionManager();
    }

    // ---- functional services -------------------------------------------

    public ProvidedObjectManager getProvidedObjectManager() { return objectManager; }

    public ConnectionManager getConnectionManager() { return connectionManager; }

    public <T extends Connection> T addConnection(T c) {
        return connectionManager.addConnection(c);
    }

    public Progress getProgress() { return progress; }

    public void log(String message, MessageLevel level) {
        LOGGER.log(promToLogLevel(level), message);
    }

    public void log(String message) {
        LOGGER.info(message);
    }

    public void log(Throwable exception) {
        LOGGER.error("Plugin error", exception);
    }

    /** Maps ProM message levels onto log4j levels (TEST is treated as DEBUG). */
    private static Level promToLogLevel(MessageLevel level) {
        Level result = Level.INFO;
        switch (level) {
            case DEBUG:
                result = Level.DEBUG;
                break;
            case ERROR:
                result = Level.ERROR;
                break;
            case NORMAL:
                result = Level.INFO;
                break;
            case TEST:
                result = Level.DEBUG;
                break;
            case WARNING:
                result = Level.WARN;
                break;
        }
        return result;
    }

    // ---- find-or-construct machinery -----------------------------------

    public <T, C extends Connection> T tryToFindOrConstructFirstObject(Class<T> type,
            Class<C> connectionType, String role, Object... input) throws ConnectionCannotBeObtained {
        return findOrConstructAllObjects(true, type, null, connectionType, role, input).iterator().next();
    }

    /**
     * First looks for objects of the requested type reachable through
     * existing connections; only when none are found does it try to
     * construct new ones via plugins.
     */
    private <T, C extends Connection> Collection<T> findOrConstructAllObjects(boolean stopAtFirst,
            Class<T> type, String name, Class<C> connectionType, String role, Object... input)
            throws ConnectionCannotBeObtained {

        Collection<T> accepted = new ArrayList<T>();
        try {
            for (C conn : getConnectionManager().getConnections(connectionType, this, input)) {
                Object object = conn.getObjectWithRole(role);
                if (type.isAssignableFrom(object.getClass())) {
                    accepted.add(Cast.<T>cast(object));
                }
            }
        } catch (Exception e) {
            // Don't care, let's try to construct later
        }
        if (!accepted.isEmpty()) {
            return accepted;
        }
        try {
            return constructAllObjects(stopAtFirst, type, name, input);
        } catch (Exception e) {
            throw new ConnectionCannotBeObtained(e.getMessage(), connectionType);
        }
    }

    /**
     * Finds plugins producing the requested type, optionally filters them by
     * name, and invokes them (ordered by fewest return values first) until
     * one succeeds — or all of them, when stopAtFirst is false.
     *
     * NOTE(review): getPluginManager(), createChildContext() and
     * getPluginLifeCycleEventListeners() are stubbed to null in this class,
     * so this path throws NPE if ever reached — confirm it is intentionally
     * unreachable in headless runs.
     */
    private <T> Collection<T> constructAllObjects(boolean stopAtFirst, Class<T> type, String name,
            Object... input) throws CancellationException, InterruptedException, ExecutionException {
        Class<?>[] types;
        if (input != null) {
            types = new Class<?>[input.length];
            for (int i = 0; i < input.length; i++) {
                types[i] = input[i].getClass();
            }
        } else {
            types = new Class<?>[0];
            input = new Object[0];
        }

        // Find available plugins
        Set<Pair<Integer, PluginParameterBinding>> set = getPluginManager().find(Plugin.class, type,
                getPluginContextType(), true, false, false, types);

        if (set.isEmpty()) {
            throw new RuntimeException("No plugin available to build this type of object: " + type.toString());
        }

        // Filter on the given name, if given.
        if (name != null) {
            Set<Pair<Integer, PluginParameterBinding>> filteredSet = new HashSet<Pair<Integer, PluginParameterBinding>>();
            for (Pair<Integer, PluginParameterBinding> pair : set) {
                if (name.equals(pair.getSecond().getPlugin().getName())) {
                    filteredSet.add(pair);
                }
            }
            set.clear();
            set.addAll(filteredSet);
        }

        if (set.isEmpty()) {
            throw new RuntimeException("No named plugin available to build this type of object: " + name + ", "
                    + type.toString());
        }

        // Prefer plugins with the fewest return values; break ties on the
        // binding itself and finally on the result index.
        SortedSet<Pair<Integer, PluginParameterBinding>> plugins = new TreeSet<Pair<Integer, PluginParameterBinding>>(
                new Comparator<Pair<Integer, PluginParameterBinding>>() {

                    public int compare(Pair<Integer, PluginParameterBinding> arg0,
                            Pair<Integer, PluginParameterBinding> arg1) {
                        int c = arg0.getSecond().getPlugin().getReturnNames().size()
                                - arg1.getSecond().getPlugin().getReturnNames().size();
                        if (c == 0) {
                            c = arg0.getSecond().compareTo(arg1.getSecond());
                        }
                        if (c == 0) {
                            c = arg0.getFirst() - arg1.getFirst();
                        }
                        return c;
                    }

                });
        plugins.addAll(set);

        Collection<T> result = new ArrayList<T>(stopAtFirst ? 1 : plugins.size());

        // get the first available plugin
        ExecutionException ex = null;
        for (Pair<Integer, PluginParameterBinding> pair : plugins) {
            PluginParameterBinding binding = pair.getSecond();
            // create a context to execute this plugin in
            PluginContext child = createChildContext("Computing: " + type.toString());
            getPluginLifeCycleEventListeners().firePluginCreated(child);

            // Invoke the binding
            PluginExecutionResult pluginResult = binding.invoke(child, input);

            // synchronize on the required result and continue
            try {
                pluginResult.synchronize();

                // get all results and pass them to the framework as provided objects
                getProvidedObjectManager().createProvidedObjects(child);
                result.add(pluginResult.<T>getResult(pair.getFirst()));
                if (stopAtFirst) {
                    break;
                }
            } catch (ExecutionException e) {
                // Try next plugin if stop at first, otherwise rethrow
                ex = e;
            } finally {
                child.getParentContext().deleteChild(child);
            }
        }
        if (result.isEmpty()) {
            assert (ex != null);
            throw ex;
        }
        return result;
    }

    // ---- stubbed context services --------------------------------------

    public PluginManager getPluginManager() { return null; }

    public PluginContextID createNewPluginContextID() { return null; }

    public void invokePlugin(PluginDescriptor plugin, int index, Object... objects) { }

    public void invokeBinding(PluginParameterBinding binding, Object... objects) { }

    public Class<? extends PluginContext> getPluginContextType() { return null; }

    public <T, C extends Connection> Collection<T> tryToFindOrConstructAllObjects(Class<T> type,
            Class<C> connectionType, String role, Object... input) throws ConnectionCannotBeObtained {
        return null;
    }

    public <T, C extends Connection> T tryToFindOrConstructFirstNamedObject(Class<T> type, String name,
            Class<C> connectionType, String role, Object... input) throws ConnectionCannotBeObtained {
        return null;
    }

    public PluginContext createChildContext(String label) { return null; }

    public ListenerList getProgressEventListeners() { return null; }

    public org.processmining.framework.plugin.events.PluginLifeCycleEventListener.List getPluginLifeCycleEventListeners() {
        return null;
    }

    public PluginContextID getID() { return null; }

    public String getLabel() { return null; }

    public Pair<PluginDescriptor, Integer> getPluginDescriptor() { return null; }

    public PluginContext getParentContext() { return null; }

    public java.util.List<PluginContext> getChildContexts() { return null; }

    public PluginExecutionResult getResult() { return null; }

    // returns a dummy future so callers polling for a result don't NPE
    public ProMFuture<?> getFutureResult(int i) {
        return new ProMFuture(XLog.class, "name") {

            @Override
            protected Object doInBackground() throws Exception {
                return new Object();
            }
        };
    }

    public Executor getExecutor() { return null; }

    public boolean isDistantChildOf(PluginContext context) { return false; }

    public void setFuture(PluginExecutionResult resultToBe) { }

    public void setPluginDescriptor(PluginDescriptor descriptor, int methodIndex) throws FieldSetException,
            RecursiveCallException { }

    public boolean hasPluginDescriptorInPath(PluginDescriptor descriptor, int methodIndex) { return false; }

    public org.processmining.framework.plugin.events.Logger.ListenerList getLoggingListeners() { return null; }

    public PluginContext getRootContext() { return null; }

    public boolean deleteChild(PluginContext child) { return false; }

    public void clear() { }

}
package au.edu.qut.prom.helpers;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.processmining.framework.connections.Connection;
import org.processmining.framework.connections.ConnectionCannotBeObtained;
import org.processmining.framework.connections.ConnectionID;
import org.processmining.framework.connections.ConnectionManager;
import org.processmining.framework.plugin.PluginContext;
import org.processmining.framework.plugin.events.ConnectionObjectListener;


/**
 * ConnectionManager for headless runs from a test scaffold or the command
 * line, even for plugins that claim to be GUI only. Adapted from Andreas
 * Rogge-Solti's StochasticNetUtils.
 *
 * Connections added explicitly are remembered and retrievable by id, but
 * type-based lookups always refuse (Leemans-style): maintaining live
 * connections headless leads to NPEs once they are marked removed and fall
 * out of memory (see the method of this name in StochasticNetUtils).
 */
public class HeadlessConnectionManager implements ConnectionManager {

    private final Map<ConnectionID, Connection> connections = new HashMap<ConnectionID, Connection>();

    public HeadlessConnectionManager() {
    }

    public void setEnabled(boolean isEnabled) {
        // no-op: this manager always reports disabled
    }

    public boolean isEnabled() {
        return false;
    }

    public <T extends Connection> T getFirstConnection(Class<T> connectionType, PluginContext context,
            Object... objects) throws ConnectionCannotBeObtained {
        // refusing outright goes smoother than handing out stale connections
        throw new ConnectionCannotBeObtained("Connections aren't provided during headless run", connectionType,
                objects);
    }

    public <T extends Connection> Collection<T> getConnections(Class<T> connectionType, PluginContext context,
            Object... objects) throws ConnectionCannotBeObtained {
        throw new ConnectionCannotBeObtained("Connections aren't provided during headless run", connectionType,
                objects);
    }

    public org.processmining.framework.plugin.events.ConnectionObjectListener.ListenerList getConnectionListeners() {
        // fresh, empty listener list: nobody listens during a headless run
        return new ConnectionObjectListener.ListenerList();
    }

    public Collection<ConnectionID> getConnectionIDs() {
        // ids of remembered connections are deliberately not exposed
        return new ArrayList<ConnectionID>();
    }

    public Connection getConnection(ConnectionID id) throws ConnectionCannotBeObtained {
        Connection known = connections.get(id);
        if (known != null) {
            return known;
        }
        throw new ConnectionCannotBeObtained("No connection with id " + id.toString(), null);
    }

    public void clear() {
        this.connections.clear();
    }

    public <T extends Connection> T addConnection(T connection) {
        connections.put(connection.getID(), connection);
        connection.setManager(this);
        return connection;
    }

}
package au.edu.qut.prom.helpers;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.Executor;

import javax.swing.JComponent;
import javax.swing.filechooser.FileFilter;

import org.deckfour.uitopia.api.event.TaskListener.InteractionResult;
import org.processmining.contexts.uitopia.UIContext;
import org.processmining.contexts.uitopia.UIPluginContext;
import org.processmining.contexts.uitopia.model.ProMTask;
import org.processmining.framework.connections.Connection;
import org.processmining.framework.connections.ConnectionCannotBeObtained;
import org.processmining.framework.connections.ConnectionManager;
import org.processmining.framework.plugin.PluginContext;
import org.processmining.framework.plugin.PluginContextID;
import org.processmining.framework.plugin.PluginDescriptor;
import org.processmining.framework.plugin.PluginExecutionResult;
import org.processmining.framework.plugin.PluginManager;
import org.processmining.framework.plugin.PluginParameterBinding;
import org.processmining.framework.plugin.ProMFuture;
import org.processmining.framework.plugin.Progress;
import org.processmining.framework.plugin.RecursiveCallException;
import org.processmining.framework.plugin.events.Logger.MessageLevel;
import org.processmining.framework.plugin.events.PluginLifeCycleEventListener.List;
import org.processmining.framework.plugin.events.ProgressEventListener.ListenerList;
import org.processmining.framework.plugin.impl.FieldSetException;
import org.processmining.framework.providedobjects.ProvidedObjectManager;
import org.processmining.framework.util.Pair;

/**
 * Usually it's enough for the type signature to be a PluginContext. But some
 * code specifically checks if the object is an instanceof UIPluginContext,
 * and then starts throwing dialog boxes up all over your nice new shoes.
 *
 * This wrapper therefore implements only {@link PluginContext} and forwards
 * every call, without exception, to a {@link HeadlessUIPluginContext}.
 *
 * @author burkeat
 */
public class HeadlessDefinitelyNotUIPluginContext implements PluginContext {

    private final HeadlessUIPluginContext delegate;

    public HeadlessDefinitelyNotUIPluginContext(PluginContext context, String label) {
        delegate = new HeadlessUIPluginContext(context, label);
    }

    // ---- Object identity ------------------------------------------------

    public int hashCode() { return delegate.hashCode(); }
    public boolean equals(Object o) { return delegate.equals(o); }
    public String toString() { return delegate.toString(); }

    // ---- connections ----------------------------------------------------

    public ConnectionManager getConnectionManager() { return delegate.getConnectionManager(); }
    public <T extends Connection> T addConnection(T c) { return delegate.addConnection(c); }

    // ---- progress, results and execution --------------------------------

    public Progress getProgress() { return delegate.getProgress(); }
    public ProMFuture<?> getFutureResult(int i) { return delegate.getFutureResult(i); }
    public void setFuture(PluginExecutionResult futureToBe) { delegate.setFuture(futureToBe); }
    public PluginExecutionResult getResult() { return delegate.getResult(); }
    public Executor getExecutor() { return delegate.getExecutor(); }

    // ---- logging ---------------------------------------------------------

    public void log(String message, MessageLevel level) { delegate.log(message, level); }
    public void log(String message) { delegate.log(message); }
    public void log(Throwable exception) { delegate.log(exception); }
    public org.processmining.framework.plugin.events.Logger.ListenerList getLoggingListeners() {
        return delegate.getLoggingListeners();
    }

    // ---- file dialogs (headless delegate decides what these mean) --------

    public File openFile(FileFilter filter) throws IOException { return delegate.openFile(filter); }
    public File saveFile(String defaultExtension, String... extensions) throws IOException {
        return delegate.saveFile(defaultExtension, extensions);
    }
    public File[] openFiles(FileFilter filter) throws IOException { return delegate.openFiles(filter); }

    // ---- UI-flavoured services -------------------------------------------

    public UIContext getGlobalContext() { return delegate.getGlobalContext(); }
    public UIPluginContext getRootContext() { return delegate.getRootContext(); }
    public void setTask(ProMTask task) { delegate.setTask(task); }
    public ProMTask getTask() { return delegate.getTask(); }
    public InteractionResult showConfiguration(String title, JComponent configuration) {
        return delegate.showConfiguration(title, configuration);
    }
    public InteractionResult showWizard(String title, boolean first, boolean last, JComponent configuration) {
        return delegate.showWizard(title, first, last, configuration);
    }

    // ---- plugin metadata and lifecycle -----------------------------------

    public Pair<PluginDescriptor, Integer> getPluginDescriptor() { return delegate.getPluginDescriptor(); }
    public List getPluginLifeCycleEventListeners() { return delegate.getPluginLifeCycleEventListeners(); }
    public ListenerList getProgressEventListeners() { return delegate.getProgressEventListeners(); }
    public PluginContextID getID() { return delegate.getID(); }
    public String getLabel() { return delegate.getLabel(); }
    public boolean hasPluginDescriptorInPath(PluginDescriptor plugin, int methodIndex) {
        return delegate.hasPluginDescriptorInPath(plugin, methodIndex);
    }
    public void setPluginDescriptor(PluginDescriptor descriptor, int methodIndex)
            throws FieldSetException, RecursiveCallException {
        delegate.setPluginDescriptor(descriptor, methodIndex);
    }
    public PluginManager getPluginManager() { return delegate.getPluginManager(); }
    public ProvidedObjectManager getProvidedObjectManager() { return delegate.getProvidedObjectManager(); }
    public PluginContextID createNewPluginContextID() { return delegate.createNewPluginContextID(); }
    public void invokePlugin(PluginDescriptor plugin, int index, Object... objects) {
        delegate.invokePlugin(plugin, index, objects);
    }
    public void invokeBinding(PluginParameterBinding binding, Object... objects) {
        delegate.invokeBinding(binding, objects);
    }
    public Class<? extends PluginContext> getPluginContextType() { return delegate.getPluginContextType(); }

    // ---- context hierarchy -----------------------------------------------

    public UIPluginContext createChildContext(String label) { return delegate.createChildContext(label); }
    public java.util.List<PluginContext> getChildContexts() { return delegate.getChildContexts(); }
    public PluginContext getParentContext() { return delegate.getParentContext(); }
    public boolean isDistantChildOf(PluginContext context) { return delegate.isDistantChildOf(context); }
    public boolean deleteChild(PluginContext child) { return delegate.deleteChild(child); }
    public void clear() { delegate.clear(); }

    // ---- find-or-construct -----------------------------------------------

    public <T, C extends Connection> Collection<T> tryToFindOrConstructAllObjects(Class<T> type,
            Class<C> connectionType, String role, Object... input) throws ConnectionCannotBeObtained {
        return delegate.tryToFindOrConstructAllObjects(type, connectionType, role, input);
    }

    public <T, C extends Connection> T tryToFindOrConstructFirstObject(Class<T> type, Class<C> connectionType,
            String role, Object... input) throws ConnectionCannotBeObtained {
        return delegate.tryToFindOrConstructFirstObject(type, connectionType, role, input);
    }

    public <T, C extends Connection> T tryToFindOrConstructFirstNamedObject(Class<T> type, String name,
            Class<C> connectionType, String role, Object... input) throws ConnectionCannotBeObtained {
        return delegate.tryToFindOrConstructFirstNamedObject(type, name, connectionType, role, input);
    }

}
+ * + * @author burkeat + * + */ +public class HeadlessUIPluginContext extends UIPluginContext{ + + private static UIPluginContext MAIN_PLUGIN_CONTEXT; + + static { + UIContext MAIN_CONTEXT = new UIContext(); + MAIN_PLUGIN_CONTEXT = MAIN_CONTEXT.getMainPluginContext().createChildContext("HeadlessPluginContext"); + } + + private PluginContext context; + + public HeadlessUIPluginContext(PluginContext context, String label) { + super(MAIN_PLUGIN_CONTEXT,label); + this.context = context; + } + + @Override + public ConnectionManager getConnectionManager() { + return this.context.getConnectionManager(); + } + + @Override + public T addConnection(T c) { + return this.context.addConnection(c); + } + + @Override + public void clear() { + this.context.clear(); + } + + @Override + public Progress getProgress() { + return context.getProgress(); + } + + @Override + public ProMFuture getFutureResult(int i) { + return context.getFutureResult(i); + } + + @Override + public void setFuture(PluginExecutionResult futureToBe) { + context.setFuture(futureToBe); + } + + @Override + public PluginExecutionResult getResult() { + return context.getResult(); + } + + @Override + public void log(String message, Logger.MessageLevel level) { + context.log(message, level); + } + + @Override + public void log(String message) { + context.log(message); + } + + @Override + public void log(Throwable exception) { + context.log(exception); + } + +} diff --git a/src/au/edu/qut/prom/helpers/PackageListingExporter.java b/src/au/edu/qut/prom/helpers/PackageListingExporter.java new file mode 100644 index 0000000..761674b --- /dev/null +++ b/src/au/edu/qut/prom/helpers/PackageListingExporter.java @@ -0,0 +1,228 @@ +package au.edu.qut.prom.helpers; + +import java.io.FileWriter; +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.SortedSet; + +import org.processmining.contexts.uitopia.UIPluginContext; +import 
org.processmining.contexts.uitopia.annotations.UIExportPlugin; +import org.processmining.contexts.uitopia.annotations.UIImportPlugin; +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.contexts.uitopia.annotations.Visualizer; +import org.processmining.contexts.uitopia.packagemanager.PMController; +import org.processmining.contexts.uitopia.packagemanager.PMPackage; +import org.processmining.framework.boot.Boot; +import org.processmining.framework.packages.PackageDescriptor; +import org.processmining.framework.packages.PackageManager; +import org.processmining.framework.plugin.PluginDescriptor; +import org.processmining.framework.util.HTMLToString; +import org.processmining.plugins.ShowPackageOverviewPlugin; + +/** + * Partially working package and plugin listing. The package listing works, the plugin listing + * doesn't. Based on org.processmining.plugins.ShowPackageOverviewPlugin in prom core + * plugins package. Idea was to output more info available for help etc, but it's an experiment + * that didn't work out. For now, just run PromM and run the "Show Package Overview" plugin. You + * can then copy/paste the output into a spreadsheet, like an animal. + * + * @author burkeat + * + */ +public class PackageListingExporter { + + + private static String toExtendedHTMLString(Collection pluginDescriptors, + boolean includeHTMLTags) + { + StringBuffer buffer = new StringBuffer(); + + if (includeHTMLTags) { + buffer.append(""); + } + buffer.append("

ProM Package Overview

"); + + PMController packageController = new PMController(Boot.Level.NONE); + List uptodatePackages = packageController.getToUninstallPackages(); + List outofdatePackages = packageController.getToUpdatePackages(); + buffer.append("

Installed packages

"); + packageListAsTable(buffer, uptodatePackages); + buffer.append("

Updates available

"); + packageListAsTable(buffer, outofdatePackages); + + buffer.append("

Available plug-ins

"); + buffer.append(""); + buffer.append(""); + for (PluginDescriptor pluginDescriptor : pluginDescriptors) { + String uiName = null; + boolean isUITopia = false; + UITopiaVariant variant = pluginDescriptor.getAnnotation(UITopiaVariant.class); + if (variant != null) { + uiName = variant.uiLabel(); + isUITopia = true; + variantAsRow(buffer, pluginDescriptor, uiName, variant); + } + Visualizer visualizer = pluginDescriptor.getAnnotation(Visualizer.class); + if (visualizer != null) { + uiName = visualizer.name(); + isUITopia = true; + visualizerAsRow(buffer, pluginDescriptor, uiName); + } + UIImportPlugin importPlugin = pluginDescriptor.getAnnotation(UIImportPlugin.class); + if (importPlugin != null) { + uiName = pluginDescriptor.getName(); + isUITopia = true; + importPluginAsRow(buffer, pluginDescriptor, uiName); + } + UIExportPlugin exportPlugin = pluginDescriptor.getAnnotation(UIExportPlugin.class); + if (exportPlugin != null) { + uiName = pluginDescriptor.getName(); + isUITopia = true; + uiExportPluginAsRow(buffer, pluginDescriptor, uiName); + } + for (int i = 0; i < pluginDescriptor.getNumberOfMethods(); i++) { + variant = pluginDescriptor.getAnnotation(UITopiaVariant.class, i); + if (variant != null) { + uiName = variant.uiLabel(); + isUITopia = true; + variantAsRow(buffer, pluginDescriptor, uiName, variant); + } + } + if (!isUITopia) { + nonUITopiaRow(buffer, pluginDescriptor, uiName); + } + } + buffer.append("
Plug-in nameUITopiaUITopia namePackage nameAuthor nameDescriptionHelp
"); + if (includeHTMLTags) { + buffer.append(""); + } + return buffer.toString(); + + } + + + private static void nonUITopiaRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) { + buffer.append(""); + buffer.append("" + pluginDescriptor.getName() + ""); + buffer.append(""); + buffer.append("" + (uiName == null ? "" : uiName) + ""); + packageCells(buffer, pluginDescriptor); + } + + + private static void packageCells(StringBuffer buffer, PluginDescriptor pluginDescriptor) { + String packName = null; + String authorName = null; + String description = null; + PackageDescriptor packageDescriptor = pluginDescriptor.getPackage(); + if (packageDescriptor != null) { + packName = packageDescriptor.getName(); + authorName = packageDescriptor.getAuthor(); + description = packageDescriptor.getDescription(); + } + buffer.append("" + (packName == null ? "" : packName) + ""); + buffer.append("" + (authorName == null ? "" : authorName) + ""); + cell( (description == null ? "" : description), buffer ); + buffer.append(""); + } + + + private static void uiExportPluginAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) { + buffer.append(""); + buffer.append("" + pluginDescriptor.getName() + ""); + buffer.append("Export"); + buffer.append("" + (uiName == null ? "" : uiName) + ""); + packageCells(buffer, pluginDescriptor); + } + + + private static void importPluginAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) { + buffer.append(""); + buffer.append("" + pluginDescriptor.getName() + ""); + buffer.append("Import"); + buffer.append("" + (uiName == null ? "" : uiName) + ""); + packageCells(buffer, pluginDescriptor); + } + + + private static void visualizerAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName) { + buffer.append(""); + buffer.append("" + pluginDescriptor.getName() + ""); + buffer.append("Visualizer"); + buffer.append("" + (uiName == null ? 
"" : uiName) + ""); + packageCells(buffer, pluginDescriptor); + } + + + private static void variantAsRow(StringBuffer buffer, PluginDescriptor pluginDescriptor, String uiName, + UITopiaVariant variant) { + buffer.append(""); + buffer.append("" + pluginDescriptor.getName() + ""); + buffer.append("Plug-in variant"); + buffer.append("" + (uiName == null ? "" : uiName) + ""); + String packName = null; + PackageDescriptor packageDescriptor = pluginDescriptor.getPackage(); + if (packageDescriptor != null) { + packName = packageDescriptor.getName(); + } + buffer.append("" + (packName == null ? "" : packName) + ""); + buffer.append("" + variant.author() + ""); + cell( variant.uiHelp() , buffer); + buffer.append(""); + } + + private static void packageListAsTable(StringBuffer buffer, List uptodatePackages) { + buffer.append(""); + buffer.append(""); + for (PMPackage pack : uptodatePackages) { + buffer.append(""); + buffer.append(""); + buffer.append(""); + buffer.append(""); + buffer.append(""); + cell(pack.getDescription(),buffer); + buffer.append(""); + for (String s : pack.getDependencies()) { + buffer.append(""); + } + } + buffer.append("
PackageDependencyVersionAuthorDescription
" + pack.getPackageName() + "" + pack.getVersion() + "" + pack.getAuthorName() + "
" + s + "
"); + } + + private static void cell(String contents, StringBuffer buffer) { + buffer.append("" + contents + ""); + } + + public static void standardListing(String fileName) throws IOException{ + FileWriter writer = new FileWriter(fileName); + UIPluginContext uipc = + new HeadlessUIPluginContext(new ConsoleUIPluginContext(), "show_package_exporter"); + HTMLToString output = ShowPackageOverviewPlugin.info(uipc); + writer.write(output.toHTMLString(true)); + writer.close(); + } + + public static void extendedListing(String fileName) throws IOException{ + FileWriter writer = new FileWriter(fileName); + UIPluginContext uipc = + new HeadlessUIPluginContext(new ConsoleUIPluginContext(), "show_package_exporter"); + System.out.println("extendedListing()"); + PackageManager manager = PackageManager.getInstance(); + manager.initialize(Boot.Level.ALL); + Set packages = manager.getAvailablePackages(); + System.out.println("Total packages:" + packages.size()); + SortedSet allPlugins = uipc.getPluginManager().getAllPlugins(); + System.out.println("Total plugins:" + allPlugins.size()); + // TODO: at time of writing, no plugins are initialized, so this list is empty + // which takes away the most useful aspect of this little tool. 
+ writer.write(toExtendedHTMLString(allPlugins, true)); + writer.close(); + } + + public static void main(String[] args) throws IOException{ + // standardListing("prompackages.html"); + extendedListing("prompackagesextended.html"); + } +} diff --git a/src/au/edu/qut/prom/helpers/PetriNetFragmentParser.java b/src/au/edu/qut/prom/helpers/PetriNetFragmentParser.java new file mode 100644 index 0000000..1907051 --- /dev/null +++ b/src/au/edu/qut/prom/helpers/PetriNetFragmentParser.java @@ -0,0 +1,372 @@ +package au.edu.qut.prom.helpers; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetImpl; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.petrinet.impl.StochasticNetImpl; +import org.processmining.models.semantics.petrinet.Marking; + +/** + * Allows the creation of Petri nets with short one line ascii sketches, for example + * initialPlace -> [transition1] -> mp -> [transition2] -> finalPlace + * + * Larger nets can be created with multiple invocations. Existing nodes will be looked up by + * label. + * + * Weighted transitions without weights, as in {b}, are defaulted to weight 1.0. + * + * Current limitations: no support for SPNs beyond weighted transitions. No support for separate + * nodes with duplicate labels. + * + * Methods for creating {@code AcceptingPetriNets} are also provided. 
These use naming + * conventions to identify initial and final markings per + * {@link #createAcceptingNet(String, String)}. + * + * Grammar + * + *
+ * PETRI_ONELINE_NET 	:: PLACE EDGE TRANSITION EDGE PLACE_LED_SUBNET
+ * PLACE_LED_SUBNET  	:: PLACE EDGE TRANSITION EDGE PLACE_LED_SUBNET
+ * PLACE_LED_SUBNET  	:: PLACE 
+ * TRANSITION_SUBNET 	:: TRANSITION EDGE PLACE EDGE TRANSITION_SUBNET
+ * TRANSITION_SUBNET 	:: TRANSITION 
+ * TRANSITION        	:: SIMPLE_TRANSITION || WEIGHTED_TRANSITION
+ * SIMPLE_TRANSITION 	:: '[' LABEL ']'
+ * WEIGHTED_TRANSITION  :: WEIGHTED_TRAN_VALUE | WEIGHTED_TRAN_DEFAULT
+ * WEIGHTED_TRAN_VALUE  :: '{' LABEL WEIGHT '}'
+ * WEIGHTED_TRAN_DEFAULT:: '{' LABEL '}'
+ * WEIGHT			 	:: [0-9].[0-9]*
+ * PLACE             	:: LABEL
+ * EDGE              	:: '->' 
+ * LABEL             	:: alphanumeric string
+ * 
+ * + * Doesn't work for extended codepoints (eg UTF-16). + * + * @param netText + * @return + */ +public class PetriNetFragmentParser{ + + private static enum TokenInfo{ + SIMPLE_TRANSITION("\\[[a-zA-Z][a-zA-Z0-9]*\\]"), + WEIGHTED_DEFAULT_TRANSITION("\\{[a-zA-Z][a-zA-Z0-9]*\\}"), + WEIGHTED_VALUE_TRANSITION("\\{[a-zA-Z][a-zA-Z0-9]*\\s[0-9]*\\.[0-9]*\\}"), + EDGE("->"), + PLACE("[a-zA-Z][a-zA-Z0-9]*"), + TERMINAL(""); + + public static final TokenInfo[] LEX_VALUES = + {SIMPLE_TRANSITION,WEIGHTED_DEFAULT_TRANSITION,WEIGHTED_VALUE_TRANSITION,EDGE,PLACE}; + + private Pattern pattern; + + private TokenInfo(String regex){ + this.pattern = Pattern.compile("^\\s*("+regex+")"); + } + + + } + + private static class Token{ + public final PetriNetFragmentParser.TokenInfo tokenInfo; + public final String sequence; + + public Token(PetriNetFragmentParser.TokenInfo token, String sequence) { + this.tokenInfo = token; + this.sequence = sequence; + } + + public String toString() { + return sequence + ":" + tokenInfo; + } + } + + public static final Set INITIAL_PLACE_LABELS; + public static final Set FINAL_PLACE_LABELS; + + static { + INITIAL_PLACE_LABELS = new TreeSet<>(); + INITIAL_PLACE_LABELS.add("Start"); + INITIAL_PLACE_LABELS.add("Initial"); + INITIAL_PLACE_LABELS.add("I"); + FINAL_PLACE_LABELS = new TreeSet<>(); + FINAL_PLACE_LABELS.add("End"); + FINAL_PLACE_LABELS.add("Final"); + FINAL_PLACE_LABELS.add("F"); + } + + + private LinkedList tokens = new LinkedList(); + private Token lookahead = null; + private StochasticNet net; + private Map nodeLookup = new HashMap<>(); + + public void addToNet(StochasticNet net, String netText) { + tokenize(netText); + this.net = net; + parse(); + } + + public StochasticNet createNet(String label, String netText) { + StochasticNet net = new StochasticNetImpl(label); + nodeLookup = new HashMap<>(); + addToNet(net,netText); + return net; + } + + /** + * + * Returns an AcceptingPetriNet with one initial and one final place marked. 
Initial and final + * markings are determined by labeling convention but will only be applied where places have the + * correct edge properties, ie, only outgoing for initial places, only incoming for final. + * + * Naming conventions for initial places, in order of checking: Start, Initial, I. + * + * Naming conventions for final places, in order of checking: End, Final, F. + * + * @param label + * @param netText + * @return + */ + public AcceptingPetriNet createAcceptingNet(String label, String netText) { + StochasticNet net = new StochasticNetImpl(label); + nodeLookup = new HashMap<>(); + addToNet(net,netText); + return markInitialFinalPlaces(net); + } + + + public AcceptingPetriNet markInitialFinalPlaces(StochasticNet net) { + Set initialCandidates = new TreeSet<>(); + Set finalCandidates = new TreeSet<>(); + for (Place place: net.getPlaces()) { + if ( INITIAL_PLACE_LABELS.contains(place.getLabel()) + && net.getInEdges(place).isEmpty() ) + { + initialCandidates.add(place); + }else { + if (FINAL_PLACE_LABELS.contains(place.getLabel()) + && net.getOutEdges(place).isEmpty()) + { + finalCandidates.add(place); + } + } + } + Marking initialMarking = markPlaceFromCandidates(initialCandidates, INITIAL_PLACE_LABELS); + Marking finalMarking = markPlaceFromCandidates(finalCandidates, FINAL_PLACE_LABELS); + return new AcceptingPetriNetImpl(net,initialMarking,finalMarking); + } + + private Marking markPlaceFromCandidates(Set initialCandidates, Set identifyingLabels) { + Marking resultMarking = new Marking(); + for (String initLabel: identifyingLabels) { + for (Place initPlace: initialCandidates) { + if (initLabel.equals(initPlace.getLabel())){ + resultMarking.add(initPlace); + break; + } + } + } + return resultMarking; + } + + public AcceptingPetriNet createAcceptingNetArgs(String label, String ... 
specs) { + if (specs.length == 0) { + throw new RuntimeException("Cannot create empty Accepting Petri Net"); + } + AcceptingPetriNet anet = createAcceptingNet(label,specs[0]); + for (int i=1; iPetrinet in anet is a + * StochasticNet. + * + * @param anet + * @param netText + */ + public void addToAcceptingNet(AcceptingPetriNet anet, String netText) { + net = (StochasticNet)anet.getNet(); + addToNet(net,netText); + anet = markInitialFinalPlaces(net); + } + + public StochasticNet createNetArgs(String label, String ... specs) { + if (specs.length == 0) { + return new StochasticNetImpl(label); + } + StochasticNet net = createNet(label, specs[0]); + for (int i=1; i expectedClass) { + PetrinetNode existing = nodeLookup.get(label); + if (existing != null) + if (!(expectedClass.isInstance(existing))) { + throw new RuntimeException("New node " + label + " duplicates existing node of wrong type"); + } + return existing; + } + + + private void edge() { + if (lookahead.tokenInfo != TokenInfo.EDGE) + throw new RuntimeException("Expected ->, but found " + lookahead ); + nextToken(); + } + + + private Place place() { + String label = lookahead.sequence; + Place place = checkExistingPlace(label); + if (place == null) { + place = net.addPlace(label); + nodeLookup.put(label,place); + } + nextToken(); + return place; + } + + private void nextToken() { + tokens.pop(); + // at the end of input we return an epsilon token + if (tokens.isEmpty()) + lookahead = new Token(TokenInfo.TERMINAL, ""); + else + lookahead = tokens.getFirst(); + } + +} \ No newline at end of file diff --git a/src/au/edu/qut/prom/helpers/PetrinetExportUtils.java b/src/au/edu/qut/prom/helpers/PetrinetExportUtils.java new file mode 100644 index 0000000..f32613e --- /dev/null +++ b/src/au/edu/qut/prom/helpers/PetrinetExportUtils.java @@ -0,0 +1,122 @@ +package au.edu.qut.prom.helpers; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; 
+import java.util.HashMap; +import java.util.Map; + +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.DistributionType; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.plugins.stochasticpetrinet.StochasticNetUtils; + +/** + * Utility methods for exporting petri nets in visualization formats. + * + * Originally adapted from + * org.processmining.models.graphbased.directed.petrinet.impl.ToStochasticNet. + * + * @author burkeat + * + */ +public class PetrinetExportUtils { + + private static final double EPSILON = 0.0001; + private static final String LINE_SEP = "\n"; + + private static int checkId(PetrinetNode node, Map idMapping, int currentCounter) { + if (!idMapping.containsKey(node)) { + idMapping.put(node, String.valueOf("id" + (currentCounter++))); + } + return currentCounter; + } + + + /** + * Originally adopted from exportPN2DOT method from the EventToActivityMatcher plugin + * + * @param net + * @author Thomas Baier, Andreas Rogge-Solti + */ + public static String convertPetrinetToDOT(Petrinet net) { + String lsep = System.getProperty("line.separator"); + + String resultString = "digraph G { " + lsep; + resultString += "ranksep=\".3\"; fontsize=\"14\"; remincross=true; margin=\"0.0,0.0\"; fontname=\"Arial\";rankdir=\"LR\";" + lsep; + resultString += "edge [arrowsize=\"0.5\"];\n"; + resultString += "node [height=\".2\",width=\".2\",fontname=\"Arial\",fontsize=\"14\"];\n"; + resultString += "ratio=0.4;" + lsep; + + 
Map idMapping = new HashMap<>(); + int id = 1; + for (Transition tr : net.getTransitions()) { + String label = tr.getLabel(); + String shape = "shape=\"box\""; + if (tr instanceof TimedTransition) { + TimedTransition tt = (TimedTransition) tr; + label += "\\n" + StochasticNetUtils.printDistribution(tt.getDistribution()); + if (tt.getDistributionType().equals(DistributionType.IMMEDIATE)) { + shape += ",margin=\"0, 0.1\""; + } + double weight = tt.getWeight(); + if (weight > 0.0d ) { + if ( Math.abs( Math.round(weight) - weight ) < EPSILON ){ + label += "\\n" + String.format("%d", Math.round(weight)); + }else { + label += "\\n" + String.format("%.3f", weight); + } + } + } + if (tr.isInvisible()) { + shape += ",color=\"black\",fontcolor=\"white\""; + } + id = checkId(tr, idMapping, id); + resultString += idMapping.get(tr) + " [" + shape + ",label=\"" + label + "\",style=\"filled\"];" + lsep; + } + + + // Places + for (Place place : net.getPlaces()) { + id = checkId(place, idMapping, id); + resultString += idMapping.get(place) + " [shape=\"circle\",label=\"\"];" + lsep; + } + + // Edges + for (PetrinetEdge edge : net.getEdges()) { + id = checkId(edge.getSource(), idMapping, id); + id = checkId(edge.getTarget(), idMapping, id); + + String edgeString = idMapping.get(edge.getSource()) + " -> " + idMapping.get(edge.getTarget()); + resultString += edgeString + lsep; + } + + resultString += "}"; + + return resultString; + } + + /** + * Small files only - reads in one hit + * + * @param filePath + * @return + * @throws IOException + */ + public static String petriNetFragmentToDOT(String filePath) throws IOException + { + byte[] encoded = Files.readAllBytes( Paths.get( filePath )); + String text = new String(encoded,StandardCharsets.UTF_8); + PetriNetFragmentParser parser = new PetriNetFragmentParser(); + String[] lines = text.split(LINE_SEP); + StochasticNet net = parser.createNetArgs(filePath, lines); + return convertPetrinetToDOT(net); + } + +} diff --git 
a/src/au/edu/qut/prom/helpers/StochasticPetriNetUtils.java b/src/au/edu/qut/prom/helpers/StochasticPetriNetUtils.java new file mode 100644 index 0000000..9c28bcb --- /dev/null +++ b/src/au/edu/qut/prom/helpers/StochasticPetriNetUtils.java @@ -0,0 +1,276 @@ +package au.edu.qut.prom.helpers; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.processmining.models.graphbased.directed.AbstractDirectedGraph; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.utils.GraphIterator; +import org.processmining.models.graphbased.directed.utils.GraphIterator.EdgeAcceptor; +import org.processmining.models.graphbased.directed.utils.GraphIterator.NodeAcceptor; +import org.processmining.models.semantics.petrinet.Marking; + +public class StochasticPetriNetUtils { + + private static Logger LOGGER = LogManager.getLogger(); + + private static final double EPSILON = 0.00001d; + + public static boolean areEqual(StochasticNet o1, StochasticNet o2) { + if (o1 == null && o2 == null) + return true; + if (o1 == null || o2 == null) + return false; + Map o1PlaceMap = o1.getPlaces().stream().collect( + Collectors.toMap(Place::getLabel, Function.identity())); + for (Place p: o2.getPlaces()) { + Place 
o1Place = o1PlaceMap.get(p.getLabel()); + if (!areEqual(o1Place,p)) { + LOGGER.debug("Not equal: places {} != {}",p, o1Place); + return false; + } + } + Map o1TransitionMap = o1.getTransitions().stream().collect( + Collectors.toMap(Transition::getLabel, Function.identity())); + for (Transition t: o2.getTransitions()) { + Transition o1Transition = o1TransitionMap.get(t.getLabel()); + if (o1Transition instanceof TimedTransition) { + if (!areEqual((TimedTransition)o1Transition,(TimedTransition)t)) { + LOGGER.debug("Not equal: transitions {} != {}",t,o1Transition); + return false; + } + }else { + if (!areEqual(o1Transition,t)) + return false; + } + } + return true; + } + + public static boolean areEqual(TimedTransition t1, TimedTransition t2) { + if (t1 == null && t2 == null) + return true; + if (t1 == null || t2 == null) + return false; + + if ( Math.abs( t1.getWeight() - t2.getWeight() ) > EPSILON + || (!t1.getDistributionType().equals(t2.getDistributionType() ) ) ) + { + return false; + } + return areEqual( (PetrinetNode)t1, t2); + + } + + public static boolean areEqual(PetrinetNode p1, PetrinetNode p2) { + if (p1 == null && p2 == null) + return true; + if (p1 == null || p2 == null) + return false; + if (! p1.getLabel().equals(p2.getLabel())) + return false; + return areEqual( p1.getGraph().getInEdges(p1), + p2.getGraph().getInEdges(p2)); + + } + + + public static boolean areEqual(Collection> edges1, + Collection> edges2) { + if (edges1.size() != edges2.size()) + return false; + Map edgeMap1 = edges1.stream().collect( + Collectors.toMap(p -> p.getSource().getLabel(), + p -> p.getTarget().getLabel()) ); + for (PetrinetEdge edge: edges2) { + String target = edgeMap1.get(edge.getSource().getLabel()); + if (! 
target.equals( edge.getTarget().getLabel() ) ) + return false; + } + return true; + } + + public static Collection findAllSuccessors(Transition transition) { + + final NodeAcceptor nodeAcceptor = new NodeAcceptor() { + public boolean acceptNode(PetrinetNode node, int depth) { + return ((depth != 0) && (node instanceof Transition) ); + } + }; + + Collection transitions = GraphIterator.getDepthFirstSuccessors(transition, transition.getGraph(), + new EdgeAcceptor>() { + + public boolean acceptEdge(PetrinetEdge edge, + int depth) { + return !nodeAcceptor.acceptNode(edge.getSource(), depth); + } + }, nodeAcceptor); + + return Arrays.asList(transitions.toArray(new Transition[0])); + } + + public static Collection findAllPredecessors(Transition transition) { + + final NodeAcceptor nodeAcceptor = new NodeAcceptor() { + public boolean acceptNode(PetrinetNode node, int depth) { + return ((depth != 0) && (node instanceof Transition) ); + } + }; + + Collection transitions = GraphIterator.getDepthFirstPredecessors(transition, + transition.getGraph(), + new EdgeAcceptor>() { + + public boolean acceptEdge(PetrinetEdge edge, + int depth) { + return !nodeAcceptor.acceptNode(edge.getTarget(), depth); + } + }, nodeAcceptor); + + return Arrays.asList(transitions.toArray(new Transition[0])); + } + + + public static Collection findAllSiblings(Transition transition) { + Collection transitions = new HashSet(); + AbstractDirectedGraph> net = + transition.getGraph(); + Collection> incomingEdges = + net.getInEdges(transition); + for (PetrinetEdge edge: incomingEdges) { + Collection> incomingSiblingEdges = net.getOutEdges(edge.getSource()); + for (PetrinetEdge edgeSibling: incomingSiblingEdges) { + transitions.add( edgeSibling.getTarget() ); + } + } + return Arrays.asList(transitions.toArray(new Transition[0])); + } + + public static Collection predecessors(Place place) { + Collection result = new HashSet(); + for (PetrinetEdge edge: place.getGraph().getInEdges(place)) { + 
result.add((Transition)edge.getSource()); + } + return result; + } + + public static Collection successors(Place place) { + Collection result = new HashSet(); + for (PetrinetEdge edge: place.getGraph().getOutEdges(place)) { + result.add((Transition)edge.getTarget()); + } + return result; + } + + public static Collection predecessors(Transition transition) { + Collection result = new HashSet<>(); + for (PetrinetEdge edge: transition.getGraph().getInEdges(transition)) { + result.add((Place)edge.getSource()); + } + return result; + } + + public static Collection successors(Transition transition) { + Collection result = new HashSet<>(); + for (PetrinetEdge edge: transition.getGraph().getOutEdges(transition)) { + result.add((Place)edge.getTarget()); + } + return result; + } + + /** + * Pre: unique label equivalence for places between the two nets. This is needed because + * a number of valid and interesting nets don't have input places with no incoming edges, ie, + * they are not WorkflowNets. InductiveMiner and other miners can produce such nets under + * important edge cases. + * + * An earlier version of this checked for net.getInEdges(newPlace).isEmpty(). 
+	 * 
+	 * @param initialMarking
+	 * @param net
+	 * @return
+	 */
+	public static Marking findEquivalentInitialMarking(Marking initialMarking, StochasticNet net) {
+		Marking newMarking = new Marking();
+		for (Place oldPlace: initialMarking) {
+			for (Place newPlace: net.getPlaces()) {
+				if (oldPlace.getLabel().equals(newPlace.getLabel() )) {
+					newMarking.add(newPlace);
+					// NOTE(review): returns after the FIRST label match, so a
+					// multi-place initial marking is truncated to a single place.
+					// Confirm this single-token behavior is intended.
+					return newMarking;
+				}
+			}
+		}
+		// No label match found: an empty marking is returned.
+		return newMarking;
+	}
+
+	/**
+	 * Find the marking over {@code net} whose places have the same labels as the
+	 * places of each given final marking. A place is only carried over when it is
+	 * also a sink in {@code net} (no outgoing edges).
+	 *
+	 * @param finalMarkings final markings of the source net
+	 * @param net target net, matched by place label
+	 * @return one translated marking per input marking (possibly empty markings)
+	 */
+	public static Set findEquivalentFinalMarkings(Set finalMarkings, StochasticNet net) {
+		Set newMarkings = new HashSet<>();
+		for (Marking finalMarking: finalMarkings) {
+			Marking newMarking = new Marking();
+			for (Place oldPlace: finalMarking) {
+				for (Place newPlace: net.getPlaces()) {
+					// Label equivalence AND sink check (see method javadoc).
+					if (oldPlace.getLabel().equals(newPlace.getLabel())
+							&& net.getOutEdges(newPlace).isEmpty() ) {
+						newMarking.add(newPlace);
+					}
+				}
+			}
+			newMarkings.add(newMarking);
+		}
+		return newMarkings;
+	}
+
+	/**
+	 * StochasticPetriNet2StochasticDeterministicFiniteAutomatonPlugin.guessInitialMarking() by Leemans
+	 *
+	 * Marks every place with no incoming edges (source places).
+	 *
+	 * @param net
+	 * @return
+	 */
+	public static Marking guessInitialMarking(Petrinet net) {
+		Marking result = new Marking();
+		for (Place p : net.getPlaces()) {
+			if (net.getInEdges(p).isEmpty()) {
+				result.add(p);
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Health warning - this simply finds places with only incoming arcs. It will behave
+	 * for sound workflow nets, but may be quite different from the set of all possible final
+	 * markings given particular initial markings.
+	 * 
+	 * All qualifying final places are collected into a SINGLE marking, which is
+	 * returned as the only element of the result set.
+	 *
+	 * @param net
+	 * @return a singleton set holding one marking of all sink places
+	 */
+	public static Set guessFinalMarkingsAsIfJustFinalPlaces(Petrinet net) {
+		Set newMarkings = new HashSet<>();
+		Marking newMarking = new Marking();
+		for (Place place: net.getPlaces()) {
+			// Sink places only: outgoing edges empty, but must be connected
+			// (at least one incoming edge) to count as a final place.
+			if ( net.getOutEdges(place).isEmpty() && 
+					!net.getInEdges(place).isEmpty() )
+			{
+				newMarking.add(place);
+			}
+		}
+		newMarkings.add(newMarking);
+		return newMarkings;
+	}
+
+}
diff --git a/src/au/edu/qut/stochasticweightestimationplugins/EstimatorPluginConfiguration.java b/src/au/edu/qut/stochasticweightestimationplugins/EstimatorPluginConfiguration.java
new file mode 100644
index 0000000..612598b
--- /dev/null
+++ b/src/au/edu/qut/stochasticweightestimationplugins/EstimatorPluginConfiguration.java
@@ -0,0 +1,106 @@
+package au.edu.qut.stochasticweightestimationplugins;
+
+import java.awt.GridBagConstraints;
+import java.awt.GridBagLayout;
+
+import javax.swing.JComboBox;
+import javax.swing.JComponent;
+import javax.swing.JFrame;
+import javax.swing.JLabel;
+import javax.swing.JPanel;
+
+import org.deckfour.xes.model.XLog;
+import org.processmining.plugins.InductiveMiner.ClassifierChooser;
+
+import au.edu.qut.pm.spn_estimator.ActivityPairLHEstimator;
+import au.edu.qut.pm.spn_estimator.ActivityPairRHEstimator;
+import au.edu.qut.pm.spn_estimator.AlignmentEstimator;
+import au.edu.qut.pm.spn_estimator.BillClintonEstimator;
+import au.edu.qut.pm.spn_estimator.FrequencyEstimator;
+import au.edu.qut.pm.spn_estimator.LogSourcedWeightEstimator;
+import au.edu.qut.pm.spn_estimator.MeanScaledActivityPairRHEstimator;
+import au.edu.qut.xes.helpers.DelimitedTraceToXESConverter;
+
+/**
+ * Swing configuration panel for selecting a stochastic weight estimator and an
+ * event classifier before mining.
+ *
+ * @author burkeat
+ *
+ */
+public class EstimatorPluginConfiguration extends JPanel {
+
+	private static final long serialVersionUID = 1L;
+
+	// Display labels and estimator instances are parallel arrays: the combo-box
+	// selection index is used directly to look up ESTIMATORS.
+	private static final String[] ESTIMATOR_LABELS;
+	private static final LogSourcedWeightEstimator[] ESTIMATORS;
+
+	private static final int DEFAULT_ESTIMATOR = 3;
+	static {
+		ESTIMATOR_LABELS = 
new String[6]; + ESTIMATOR_LABELS[0] = "Frequency Estimator"; + ESTIMATOR_LABELS[1] = "LH Activity-Pair Estimator"; + ESTIMATOR_LABELS[2] = "RH Activity-Pair Estimator"; + ESTIMATOR_LABELS[3] = "Scaled RH Activity-Pair Estimator"; + ESTIMATOR_LABELS[4] = "Fork Distributed Estimator"; + ESTIMATOR_LABELS[5] = "Alignment Estimator"; + ESTIMATORS = new LogSourcedWeightEstimator[6]; + ESTIMATORS[0] = new FrequencyEstimator(); + ESTIMATORS[1] = new ActivityPairLHEstimator(); + ESTIMATORS[2] = new ActivityPairRHEstimator(); + ESTIMATORS[3] = new MeanScaledActivityPairRHEstimator(); + ESTIMATORS[4] = new BillClintonEstimator(); + ESTIMATORS[5] = new AlignmentEstimator(); + } + + private JComboBox estimatorComboBox ; + + public EstimatorPluginConfiguration(XLog log) { + super(new GridBagLayout()); + GridBagConstraints constraints = new GridBagConstraints(); + final JLabel estimatorLabel = new JLabel("Weight Estimator"); + constraints.gridx = 0; constraints.gridy = 0; constraints.ipadx = 10; constraints.anchor = GridBagConstraints.LINE_START; + add(estimatorLabel, constraints); + estimatorComboBox = new JComboBox(ESTIMATOR_LABELS); + constraints.gridx = 2; constraints.gridy = 0; constraints.anchor = GridBagConstraints.LINE_END; + add(estimatorComboBox, constraints); + final JLabel classifierLabel = new JLabel("Event Classifier"); + constraints.gridx = 0; constraints.gridy = 1; constraints.ipadx = 10; constraints.anchor = GridBagConstraints.LINE_START; + add(classifierLabel, constraints); + ClassifierChooser classifierChooser = new ClassifierChooser(log); + constraints.gridx = 2; constraints.gridy = 1; constraints.anchor = GridBagConstraints.LINE_END; + add(classifierChooser, constraints); + } + + public LogSourcedWeightEstimator getEstimator() { + int selection = estimatorComboBox.getSelectedIndex(); + if (selection >= 0) { + return ESTIMATORS[selection]; + } + return ESTIMATORS[DEFAULT_ESTIMATOR]; + } + + + // Test method + private static void createAndShowGUI() { + JFrame 
frame = new JFrame("Test"); + frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + //Create and set up the content pane. + XLog log = new DelimitedTraceToXESConverter().convertTextArgs("a b","b c"); + JComponent newContentPane = new EstimatorPluginConfiguration(log); + newContentPane.setOpaque(true); //content panes must be opaque + frame.setContentPane(newContentPane); + //Display the window. + frame.pack(); + frame.setVisible(true); + } + + // Test method + public static void main(String[] args) { + javax.swing.SwingUtilities.invokeLater(new Runnable() { + public void run() { + createAndShowGUI(); + } + }); + } + + +} diff --git a/src/au/edu/qut/stochasticweightestimationplugins/SPDWeightEstimatorPlugin.java b/src/au/edu/qut/stochasticweightestimationplugins/SPDWeightEstimatorPlugin.java new file mode 100644 index 0000000..1373c2c --- /dev/null +++ b/src/au/edu/qut/stochasticweightestimationplugins/SPDWeightEstimatorPlugin.java @@ -0,0 +1,139 @@ +package au.edu.qut.stochasticweightestimationplugins; + +import org.deckfour.uitopia.api.event.TaskListener.InteractionResult; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.classification.XEventNameClassifier; +import org.deckfour.xes.model.XLog; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetImpl; +import org.processmining.contexts.uitopia.UIPluginContext; +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.packages.PackageManager.Canceller; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginLevel; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import 
org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTree; +import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTreeReduce.ReductionFailedException; +import org.processmining.plugins.InductiveMiner.efficienttree.UnknownTreeNodeException; +import org.processmining.plugins.inductiveminer2.logs.IMLog; +import org.processmining.plugins.inductiveminer2.mining.InductiveMiner; +import org.processmining.plugins.inductiveminer2.plugins.InductiveMinerPlugin; +import org.processmining.plugins.inductiveminer2.variants.MiningParametersIMInfrequent; + +import au.edu.qut.pm.spn_estimator.LogSourcedWeightEstimator; +import au.edu.qut.pm.spn_estimator.MeanScaledActivityPairRHEstimator; +import au.edu.qut.prom.helpers.StochasticPetriNetUtils; + +@Plugin(name = "Mine Stochastic Petri net with estimators", + level = PluginLevel.Regular, + returnLabels = {"GSPN" }, + returnTypes = { StochasticNet.class }, + parameterLabels = { "Log", "Petri Net" }, + userAccessible = true, + help = SPDWeightEstimatorPlugin.TEXT) +public class SPDWeightEstimatorPlugin { + + public static final String AFFILIATION = "Queensland University of Technology"; + public static final String AUTHOR = "Adam Burke, Sander Leemans, Moe Thandar Wynn"; + public static final String EMAIL = "at.burke@qut.edu.au"; + public static final String TEXT = "Produce a GSPN with immediate transitions from an input log and Petri Net control model.\n" + + "The algorithms implemented here are detailed in \n" + + "Burke, Leemans and Wynn - Stochastic Process Discovery By Weight Estimation (2020)"; + + private static final String DEFAULT_MINER = "Inductive Miner"; + private static final String DEFAULT_ESTIMATOR = "Mean-Scaled RH Activity-Pair Estimator"; + + private static XEventNameClassifier defaultClassifier() { + return new XEventNameClassifier(); + } + + private 
static MeanScaledActivityPairRHEstimator defaultEstimator() { + return new MeanScaledActivityPairRHEstimator(); + } + + private static String getDefaultMinerName() { + return DEFAULT_MINER; + } + + + @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL, + uiLabel = "Mine Stochastic Petri net from log with estimator", + uiHelp = "Use " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER + ". " + TEXT) + @PluginVariant(variantLabel = "Mine Stochastic Petri net from log with " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER + ")", + requiredParameterLabels = {0}) + public static StochasticNet mineSPNFromLogWithDefaults(final PluginContext context, XLog log) { + try { + return mineSPNFromLogWithEstimator(context, log, + defaultEstimator(), defaultClassifier()); + } catch (Exception e) { + context.log(e); + } + return null; + } + + @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL, + uiHelp="Mine Stochastic Petri net with selected estimator. " + TEXT) + @PluginVariant(variantLabel = "Mine Stochastic Petri net with selected estimator." 
+ TEXT, + requiredParameterLabels = {0, 1} ) + public static StochasticNet mineGUISPNWithEstimator(final UIPluginContext context, XLog log, Petrinet pnet ) { + EstimatorPluginConfiguration estConfig = new EstimatorPluginConfiguration(log); + InteractionResult interaction = context.showConfiguration("Configure stochastic weight estimation", estConfig ); + if (interaction != InteractionResult.CONTINUE) { + context.getFutureResult(0).cancel(false); + return null; + } + LogSourcedWeightEstimator estimator = estConfig.getEstimator(); + Marking initialMarking = StochasticPetriNetUtils.guessInitialMarking(pnet); + AcceptingPetriNet apnet = new AcceptingPetriNetImpl(pnet, initialMarking); + StochasticNet resultNet = mineSPNWithEstimator(context, apnet, log, estimator, defaultClassifier()); + return resultNet; + } + + + public static StochasticNet mineSPNWithEstimator(final PluginContext context, AcceptingPetriNet apnet, XLog log, + LogSourcedWeightEstimator estimator, XEventClassifier classifier) + { + context.log("Mining with estimator " + estimator.getReadableID() + "..."); + StochasticNet resultNet = estimator.estimateWeights(apnet, log, classifier); + return resultNet; + } + + + public static StochasticNet mineSPNFromLogWithEstimator(final PluginContext context, XLog log, + LogSourcedWeightEstimator estimator, XEventClassifier classifier) + throws Exception + { + context.log("Mining control flow from log with " + getDefaultMinerName()); + AcceptingPetriNet apnet = mineWithDefaultMiner(context, log, classifier); + context.log("Mining with estimator " + estimator.getReadableID() + "..."); + StochasticNet resultNet = estimator.estimateWeights(apnet, log, classifier); + return resultNet; + } + + + public static AcceptingPetriNet mineWithDefaultMiner(final PluginContext context, XLog log, XEventClassifier classifier) + throws UnknownTreeNodeException, ReductionFailedException + { + context.log("Using classifier " + classifier.getClass()); + MiningParametersIMInfrequent 
parameters = new MiningParametersIMInfrequent(); + parameters.setClassifier(classifier); + IMLog imlog = parameters.getIMLog(log); + context.log("Starting inductive miner ..."); + EfficientTree tree = InductiveMiner.mineEfficientTree(imlog, parameters, new Canceller() { + public boolean isCancelled() { + return context.getProgress().isCancelled(); + } + }); + AcceptingPetriNet pnet = InductiveMinerPlugin.postProcessTree2PetriNet(tree, new Canceller() { + public boolean isCancelled() { + return context.getProgress().isCancelled(); + } + }); + return pnet; + } + +} diff --git a/src/au/edu/qut/stochasticweightestimationplugins/StochasticWeightEstimatorPlugin.java b/src/au/edu/qut/stochasticweightestimationplugins/StochasticWeightEstimatorPlugin.java new file mode 100644 index 0000000..083ce53 --- /dev/null +++ b/src/au/edu/qut/stochasticweightestimationplugins/StochasticWeightEstimatorPlugin.java @@ -0,0 +1,138 @@ +package au.edu.qut.stochasticweightestimationplugins; + +import org.deckfour.uitopia.api.event.TaskListener.InteractionResult; +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.classification.XEventNameClassifier; +import org.deckfour.xes.model.XLog; +import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +import org.processmining.acceptingpetrinet.models.impl.AcceptingPetriNetImpl; +import org.processmining.contexts.uitopia.UIPluginContext; +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.packages.PackageManager.Canceller; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginLevel; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.semantics.petrinet.Marking; +import 
org.processmining.plugins.InductiveMiner.efficienttree.EfficientTree; +import org.processmining.plugins.InductiveMiner.efficienttree.EfficientTreeReduce.ReductionFailedException; +import org.processmining.plugins.InductiveMiner.efficienttree.UnknownTreeNodeException; +import org.processmining.plugins.inductiveminer2.logs.IMLog; +import org.processmining.plugins.inductiveminer2.mining.InductiveMiner; +import org.processmining.plugins.inductiveminer2.plugins.InductiveMinerPlugin; +import org.processmining.plugins.inductiveminer2.variants.MiningParametersIMInfrequent; +import org.processmining.slpnminer.models.StochasticNetImpl; + +import au.edu.qut.pm.spn_estimator.LogSourcedWeightEstimator; +import au.edu.qut.pm.spn_estimator.MeanScaledActivityPairRHEstimator; +import au.edu.qut.prom.helpers.StochasticPetriNetUtils; + +@Plugin(name = "Disc Stochastic Weight with Estimator", + level = PluginLevel.Regular, + returnLabels = {"GSPN" }, + returnTypes = { StochasticNetImpl.class }, + parameterLabels = { "Log", "Petri Net" }, + userAccessible = true, + help = SPDWeightEstimatorPlugin.TEXT) +public class StochasticWeightEstimatorPlugin { + + public static final String AFFILIATION = "Queensland University of Technology"; + public static final String AUTHOR = "Adam Burke, Sander Leemans, Moe Thandar Wynn"; + public static final String EMAIL = "at.burke@qut.edu.au"; + public static final String TEXT = "Produce a GSPN with immediate transitions from an input log and Petri Net control model.\n" + + "The algorithms implemented here are detailed in \n" + + "Burke, Leemans and Wynn - Stochastic Process Discovery By Weight Estimation (2020)"; + + private static final String DEFAULT_MINER = "Inductive Miner"; + private static final String DEFAULT_ESTIMATOR = "Mean-Scaled RH Activity-Pair Estimator"; + + private static XEventNameClassifier defaultClassifier() { + return new XEventNameClassifier(); + } + + private static MeanScaledActivityPairRHEstimator defaultEstimator() { + return 
new MeanScaledActivityPairRHEstimator(); + } + + private static String getDefaultMinerName() { + return DEFAULT_MINER; + } + + + @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL, + uiLabel = "Mine Stochastic Petri net from log with estimator", + uiHelp = "Use " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER + ". " + TEXT) + @PluginVariant(variantLabel = "Mine Stochastic Petri net from log with " + DEFAULT_ESTIMATOR + " and " + DEFAULT_MINER + ")", + requiredParameterLabels = {0}) + public static StochasticNetImpl mineSPNFromLogWithDefaults(final PluginContext context, XLog log) { + try { + return mineSPNFromLogWithEstimator(context, log, + defaultEstimator(), defaultClassifier()); + } catch (Exception e) { + context.log(e); + } + return null; + } + + @UITopiaVariant(affiliation = AFFILIATION, author = AUTHOR, email = EMAIL, + uiHelp="Mine Stochastic Petri net with selected estimator. " + TEXT) + @PluginVariant(variantLabel = "Mine Stochastic Petri net with selected estimator." 
+ TEXT, + requiredParameterLabels = {0, 1} ) + public static StochasticNetImpl mineGUISPNWithEstimator(final UIPluginContext context, XLog log, Petrinet pnet ) { + EstimatorPluginConfiguration estConfig = new EstimatorPluginConfiguration(log); + InteractionResult interaction = context.showConfiguration("Configure stochastic weight estimation", estConfig ); + if (interaction != InteractionResult.CONTINUE) { + context.getFutureResult(0).cancel(false); + return null; + } + LogSourcedWeightEstimator estimator = estConfig.getEstimator(); + Marking initialMarking = StochasticPetriNetUtils.guessInitialMarking(pnet); + AcceptingPetriNet apnet = new AcceptingPetriNetImpl(pnet, initialMarking); + StochasticNetImpl resultNet = mineSPNWithEstimator(context, apnet, log, estimator, defaultClassifier()); + return resultNet; + } + + + public static StochasticNetImpl mineSPNWithEstimator(final PluginContext context, AcceptingPetriNet apnet, XLog log, + LogSourcedWeightEstimator estimator, XEventClassifier classifier) + { + context.log("Mining with estimator " + estimator.getReadableID() + "..."); + estimator.estimateWeights(apnet, log, classifier); + return estimator.getResult(); + } + + + public static StochasticNetImpl mineSPNFromLogWithEstimator(final PluginContext context, XLog log, + LogSourcedWeightEstimator estimator, XEventClassifier classifier) + throws Exception + { + AcceptingPetriNet apnet = mineWithDefaultMiner(context, log, classifier); + estimator.estimateWeights(apnet, log, classifier); + StochasticNetImpl resultNet = estimator.getResult(); + return resultNet; + } + + + public static AcceptingPetriNet mineWithDefaultMiner(final PluginContext context, XLog log, XEventClassifier classifier) + throws UnknownTreeNodeException, ReductionFailedException + { + context.log("Using classifier " + classifier.getClass()); + MiningParametersIMInfrequent parameters = new MiningParametersIMInfrequent(); + parameters.setClassifier(classifier); + IMLog imlog = 
parameters.getIMLog(log); + context.log("Starting inductive miner ..."); + EfficientTree tree = InductiveMiner.mineEfficientTree(imlog, parameters, new Canceller() { + public boolean isCancelled() { + return context.getProgress().isCancelled(); + } + }); + AcceptingPetriNet pnet = InductiveMinerPlugin.postProcessTree2PetriNet(tree, new Canceller() { + public boolean isCancelled() { + return context.getProgress().isCancelled(); + } + }); + return pnet; + } + +} diff --git a/src/au/edu/qut/xes/helpers/DelimitedTraceToXESConverter.java b/src/au/edu/qut/xes/helpers/DelimitedTraceToXESConverter.java new file mode 100644 index 0000000..e13e5bf --- /dev/null +++ b/src/au/edu/qut/xes/helpers/DelimitedTraceToXESConverter.java @@ -0,0 +1,139 @@ +package au.edu.qut.xes.helpers; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; + +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XAttributeMap; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.model.impl.XAttributeLiteralImpl; +import org.deckfour.xes.model.impl.XAttributeMapImpl; +import org.deckfour.xes.model.impl.XEventImpl; +import org.deckfour.xes.model.impl.XLogImpl; +import org.deckfour.xes.model.impl.XTraceImpl; + +/** + * Helper methods for converting a simple delimited file to an XES log. Intended mainly for concise, + * expressive test data. Expected syntax for input files is described in documentation for + * {@link #convertText(String, String, String)}. 
+ * + * @author burkeat + * + */ +public class DelimitedTraceToXESConverter { + + public static final String DEFAULT_EVENT_DELIMITER = " "; + public static final String DEFAULT_TRACE_DELIMITER = "\n"; + + private static final String XES_CONCEPT_NAME = "concept:name"; + + public DelimitedTraceToXESConverter() { + } + + /** + * Convert a space delimited event log with one trace on each line + * using {@link #convertText(String, String, String)} + * + * @param traces + * @return + */ + public XLog convertText(String traces) { + return convertText(traces,DEFAULT_EVENT_DELIMITER,DEFAULT_TRACE_DELIMITER); + } + + /** + * Convert a trace sequence with one trace in each vararg. Each trace is a + * sequence of space-delimited events per {@link #convertText(String, String, String)}. + * + * @param traces + * @return + */ + public XLog convertTextArgs(String ... traces) { + XAttributeMap attrMap = new XAttributeMapImpl(); + XLog result = new XLogImpl(attrMap); + for (String line: traces) { + if ("".equals(line)) + break; + XTrace trace = convertSingleTrace(DEFAULT_EVENT_DELIMITER, line); + result.add(trace); + } + return result; + } + + /** + * Convert a simple delimited file to an XES log. This is most useful for test data as it does + * not allow for the filtering of columns. It does allow multiple events per line, making + * for more concise and readable test data files. 
+ * + * Files are of the form + * + * LOG :: TRACE {TRACE_DELIMITER TRACE} + * TRACE :: EVENT {EVENT_DELIMITER EVENT} + * EVENT :: + * + * @param traces + * @param eventDelimiter + * @param traceDelimiter + * @return + */ + public XLog convertText(String traces, String eventDelimiter, String traceDelimiter) { + XAttributeMap attrMap = new XAttributeMapImpl(); + XLog result = new XLogImpl(attrMap); + for (String line: traces.split(traceDelimiter)) { + if ("".equals(line)) + break; + XTrace trace = convertSingleTrace(eventDelimiter, line); + result.add(trace); + } + return result; + } + + private XTrace convertSingleTrace(String eventDelimiter, String line) { + XAttributeMap traceAttrMap = new XAttributeMapImpl(); + XTrace trace = new XTraceImpl(traceAttrMap); + for (String eventLabel: line.split(eventDelimiter)) { + if ("".equals(eventLabel)) + break; + XAttributeMap eventAttrMap = new XAttributeMapImpl(); + XAttribute attr = new XAttributeLiteralImpl(XES_CONCEPT_NAME,eventLabel); + eventAttrMap.put(XES_CONCEPT_NAME, attr); + XEvent event = new XEventImpl(eventAttrMap); + trace.add(event); + } + return trace; + } + + /** + * + * + * @param filePath + * @return + */ + public XLog convertFile(String filePath) + throws IOException + { + return convertFile(filePath,DEFAULT_EVENT_DELIMITER,DEFAULT_TRACE_DELIMITER); + } + + /** + * Small files only. UTF-8 encoding assumed. 
+ * + * @param filePath + * @param eventDelimiter + * @param traceDelimiter + * @return + */ + public XLog convertFile(String filePath, String eventDelimiter, String traceDelimiter) + throws IOException + { + byte[] encoded = Files.readAllBytes( Paths.get( filePath )); + return convertText( new String(encoded,StandardCharsets.UTF_8) ); + } + + + +} diff --git a/src/au/edu/qut/xes/helpers/TargetFunction.java b/src/au/edu/qut/xes/helpers/TargetFunction.java new file mode 100644 index 0000000..285447d --- /dev/null +++ b/src/au/edu/qut/xes/helpers/TargetFunction.java @@ -0,0 +1,502 @@ +package au.edu.qut.xes.helpers; + +import cern.jet.random.engine.MersenneTwister; +import de.congrace.exp4j.Calculable; + +import org.apache.commons.math4.legacy.analysis.MultivariateFunction; +import org.apache.commons.math4.legacy.optim.InitialGuess; +import org.apache.commons.math4.legacy.optim.MaxEval; +import org.apache.commons.math4.legacy.optim.SimpleBounds; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.ObjectiveFunction; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.Sigma; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.CMAESOptimizer; +import org.apache.commons.rng.UniformRandomProvider; +import org.apache.commons.rng.simple.RandomSource; +import org.apache.commons.math3.analysis.UnivariateFunction; + +import java.util.*; + + +public class TargetFunction { + + public static double getUEMSC(double[] x){ + + String prob1 = "(t11*t12*t8)/(t11*t12*t7+t11*t7*t7+t11*t12*t8+t11*t7*t8+t12*t7*t8+t7*t7*t8+t12*t8*t8+t7*t8*t8)"; + String prob2 = "(t11*t12*t8)/(t11*t11*t12+t11*t12*t12+t11*t11*t7+t11*t12*t7+t11*t12*t8+t12*t12*t8+t11*t7*t8+t12*t7*t8)"; + String prob3 = "(t12*t8)/(t11*t11+t11*t12+t11*t8+t12*t8)"; + String prob4 = 
"(t11*t7*t8)/(t11*t11*t12+t11*t12*t12+t11*t11*t7+t11*t12*t7+t11*t12*t8+t12*t12*t8+t11*t7*t8+t12*t7*t8)"; + String prob5 = "(t11*t7)/(t11*t7+t11*t8+t7*t8+t8*t8)"; + String prob6 = "(t11*t7*t8)/(t11*t12*t7+t11*t7*t7+t11*t12*t8+t11*t7*t8+t12*t7*t8+t7*t7*t8+t12*t8*t8+t7*t8*t8)"; + + HashMap strToDouble = new HashMap<>(); + strToDouble.put("t12", x[0]); + strToDouble.put("t11", x[1]); + strToDouble.put("t8", x[2]); + strToDouble.put("t7", x[3]); + for(String s:strToDouble.keySet()) { + System.out.println(s+" "+strToDouble.get(s)); + + } +// System.out.println(converStringToMathExp(prob1, strToDouble)); +// System.out.println(converStringToMathExp(prob2, strToDouble)); +// System.out.println(converStringToMathExp(prob3, strToDouble)); +// System.out.println(converStringToMathExp(prob4, strToDouble)); + + return 1 - Math.max(0.00625 - converStringToMathExp(prob1, strToDouble), 0) + - Math.max(0.0375 - converStringToMathExp(prob2, strToDouble), 0) + - Math.max(0.375 - converStringToMathExp(prob3, strToDouble), 0) + - Math.max(0.3375- converStringToMathExp(prob4, strToDouble), 0) + - Math.max(0.1875 - converStringToMathExp(prob5, strToDouble), 0) + - Math.max(0.05625 - converStringToMathExp(prob6, strToDouble), 0); + } + +// public static double getEntropy(double[] x){ +// return -0.3 * (Math.log(x[0]/(x[0]+x[1]+x[4])) / Math.log(2)) +// -0.3 * (Math.log(x[1] * x[2]/((x[0]+x[1]+x[4])*(x[2]+x[3]))) / Math.log(2)) +// -0.3 * (Math.log(x[1] * x[3]/((x[0]+x[1]+x[4])*(x[2]+x[3]))) / Math.log(2)) +// -0.1 * (Math.log(x[4] /(x[0]+x[1]+x[4])) / Math.log(2)); +// } +// +// public static double getJS(double[] x){ + +// double p1 = 0.3; +// double p2 = 0.3; +// double p3 = 0.3; +// double p4 = 0.1; +// double q1 = x[0]/(x[0]+x[1]+x[4]); +// double q2 = (x[1] * x[2])/((x[0]+x[1]+x[4])*(x[2]+x[3])); +// double q3 = (x[1] * x[3])/((x[0]+x[1]+x[4])*(x[2]+x[3])); +// double q4 = x[4]/(x[0]+x[1]+x[4]); +// double m1 = 0.5*(p1+q1); +// double m2 = 0.5*(p2+q2); +// double m3 = 0.5*(p3+q3); 
+// double m4 = 0.5*(p4+q4); +// +// +// return +// 0.3 * Math.log(0.3/m1) + q1 * Math.log(q1 /m1)+ +// 0.3 * Math.log(0.3/m2) + q2 * Math.log(q2 /m2)+ +// 0.3 * Math.log(0.3/m3) + q3 * Math.log(q3 /m3)+ +// 0.1 * Math.log(0.1/m4) + q4 * Math.log(q4 /m4); +// +// } + + + public static void main(String[] args) { + +// numerator += x[0]; +// denominator += x[0]; + + MultivariateFunction fUEMSC = new MultivariateFunction() { + public double value(double[] x) { + return getUEMSC(x); + } + }; +// +// MultivariateFunction fEntropy = new MultivariateFunction() { +// public double value(double[] x) { +// return getEntropy(x); +// } +// }; + +// MultivariateFunction fJS = new MultivariateFunction() { +// public double value(double[] x) { +// return getJS(x); +// } +// }; +// +// double[] lowerBound = new double[]{0, 0, 0, 0}; +// double[] upperBound = new double[]{1, 1, 1, 1}; +// +// System.out.println("\nuEMSC"); +// BOBYQAOptimizer optim1 = new BOBYQAOptimizer(10); +// PointValuePair result2 = optim1.optimize( +// new MaxEval(100000), +// new ObjectiveFunction(fUEMSC), +// GoalType.MAXIMIZE, +// new SimpleBounds(lowerBound, upperBound), +// new InitialGuess(new double[] {0.1,0.1,0.1,0.1}) +// ); +// System.out.println(result2.getPoint()[0]); +// System.out.println(result2.getPoint()[1]); +// System.out.println(result2.getPoint()[2]); +// System.out.println(result2.getPoint()[3]); + + + int modelTransitionNum = 4; + double[] lowerBound = new double[modelTransitionNum]; + double[] upperBound = new double[modelTransitionNum]; + double[] initGuess = new double[modelTransitionNum]; + double[] sigma = new double[modelTransitionNum]; + Arrays.fill(lowerBound, 0.0001); + Arrays.fill(upperBound, 1.0000); + Arrays.fill(initGuess, 0.0001); + Arrays.fill(sigma, 0.1); + UniformRandomProvider rngG = RandomSource.MT_64.create(); + CMAESOptimizer optimizer = new CMAESOptimizer( + 1000000, + 0, + true, + modelTransitionNum, + 100, + rngG, + true, + null); + + double[] result2 = 
optimizer.optimize( + new MaxEval(1000000), + new ObjectiveFunction(fUEMSC), + GoalType.MAXIMIZE, + new PopulationSize((int) (4+3*Math.log(modelTransitionNum))), + new Sigma(sigma), + new InitialGuess(initGuess), + new SimpleBounds(lowerBound, upperBound)).getPoint(); + + System.out.println(result2[0]); + System.out.println(result2[1]); + System.out.println(result2[2]); + System.out.println(result2[3]); +// BOBYQAOptimizer optim2 = new BOBYQAOptimizer(11); +// PointValuePair result2 = optim2.optimize( +// new MaxEval(100000), +// new ObjectiveFunction(fEntropy), +// GoalType.MINIMIZE, +// new SimpleBounds(lowerBound, upperBound), +// new InitialGuess(new double[] {1,1,1,1}) +// ); +// System.out.println(result2.getPoint()[0]); +// System.out.println(result2.getPoint()[1]); +// System.out.println(result2.getPoint()[2]); +// System.out.println(result2.getPoint()[3]); +// +// System.out.println("\njs"); +// BOBYQAOptimizer optim3 = new BOBYQAOptimizer(9); +// PointValuePair result3 = optim3.optimize( +// new MaxEval(100000), +// new ObjectiveFunction(fJS), +// GoalType.MINIMIZE, +// new SimpleBounds(lowerBound, upperBound), +// new InitialGuess(new double[] {1,1,1,1}) +// ); +// System.out.println(result3.getPoint()[0]); +// System.out.println(result3.getPoint()[1]); +// System.out.println(result3.getPoint()[2]); +// System.out.println(result3.getPoint()[3]); + + +// int maxIterations = 200000; +// double stopFitness = 0; //Double.NEGATIVE_INFINITY; +// boolean isActiveCMA = true; +// int diagonalOnly = 0; +// int checkFeasableCount = 1; +// RandomGenerator random = new Well19937c(); +// boolean generateStatistics = false;// +// OptimizationData sigma = new CMAESOptimizer.Sigma(new double[] { +// (upperBound[0] - lowerBound[0]), +// (upperBound[0] - lowerBound[0]), +// (upperBound[0] - lowerBound[0]), +// (upperBound[0] - lowerBound[0]), +// (upperBound[0] - lowerBound[0])}); +// OptimizationData popSize = new CMAESOptimizer.PopulationSize((int) (4 + Math.floor(3 * 
Math.log(2)))); +// +// // construct solver +// ConvergenceChecker checker = new SimpleValueChecker(1e-6, 1e-10); +// +// CMAESOptimizer opt = new CMAESOptimizer(maxIterations, stopFitness, isActiveCMA, diagonalOnly, +// checkFeasableCount, random, generateStatistics, checker); +// PointValuePair pair = opt.optimize(new InitialGuess( +// new double[] {1,1,1,1,1}), +// new ObjectiveFunction(fEntropy), +// GoalType.MINIMIZE, new SimpleBounds(lowerBound, upperBound), +// sigma, popSize, +// new MaxIter(maxIterations), new MaxEval(maxIterations * 2)); +// System.out.println(pair.getPoint()[0]); +// System.out.println(pair.getPoint()[1]); +// System.out.println(pair.getPoint()[2]); +// System.out.println(pair.getPoint()[3]); +// System.out.println(pair.getPoint()[4]); + + } + + static public double converStringToMathExp(String calculateString, HashMap strToDouble) { + return calculateInversePolandExpression(getInversePolandExpression(calculateString),strToDouble); + } + + static public double converStringToMathExp(String calculateString) { + return calculateInversePolandExpression(getInversePolandExpression2(calculateString)); + } + + static public double converStringToMathExp( + Map constantMap, String calculateString) { + double result = 0; + for (String str : constantMap.keySet()) { + calculateString = calculateString.replaceAll(str, + constantMap.get(str)); + } + result = calculateInversePolandExpression(getInversePolandExpression(calculateString)); + return result; + } + + + static private double calculateInversePolandExpression( + List inversePolandExpression) { + double result = 0; + Stack calculateStack = new Stack(); + for (String str : inversePolandExpression) { + if (str.equals("+") || str.equals("-") || str.equals("*") + || str.equals("/")) { + + double t1 = Double.valueOf(calculateStack.pop()); + double t2 = Double.valueOf(calculateStack.pop()); + result = simpleCalculate(t2, t1, str); + calculateStack.push(result); + } else { + 
calculateStack.push(Double.valueOf(str)); + } + } +// System.out.println(String.valueOf(result)); + return result; + } + + static private double calculateInversePolandExpression( + List inversePolandExpression, + HashMap strToDouble) { + double result = 0; + Stack calculateStack = new Stack(); + for (String str : inversePolandExpression) { + if (str.equals("+") || str.equals("-") || str.equals("*") + || str.equals("/")) { + // do the calculation for two variables. + double p1 = calculateStack.pop(); + double p2 = calculateStack.pop(); + result = simpleCalculate(p2,p1,str); + calculateStack.push(result); + } else { +// System.out.println("get the str:" + str); + if(strToDouble.containsKey(str)){ + calculateStack.push(strToDouble.get(str)); + } + else{ + calculateStack.push(Double.valueOf(str)); + } + } + } + + return result; + } + + static private List getInversePolandExpression( + String normalExpression) { + List inversePolandExpression = new ArrayList(); + char[] normalChararray = (normalExpression + "$").toCharArray(); + // + Stack signStack = new Stack(); + List> signStackList = new ArrayList>(); + signStackList.add(signStack); + // + int level = 0; + + int pointPosition = 0; + double tempNumber = 0; + String tempVar =""; + boolean isInInteger = true; + + for (int i = 0; i < normalChararray.length; i++) { + + char tempChar = normalChararray[i]; + // + if (tempChar == 't') { + tempVar = "t"; + for (int j = i+1; j < normalChararray.length; j++) { + if (normalChararray[j] >= '0' && normalChararray[j] <= '9'){ + tempVar = tempVar.concat(String.valueOf(normalChararray[j])); + continue; + } + inversePolandExpression.add(tempVar); + i = j-1; + break; + } + } + else if (tempChar >= '0' && tempChar <= '9') { + if (isInInteger) { + tempNumber = tempNumber * 10 + (int) (tempChar - 48); + } + // ? + else { + tempNumber += (double) (tempChar - 48) + * Math.pow(0.1, i - pointPosition); + } + tempVar = tempVar.concat(String.valueOf(tempChar)); + } + // ? 
+ else if (tempChar == '.') { + isInInteger = false; + pointPosition = i; + } + // + else if (tempChar == '+' || tempChar == '-' || tempChar == '*' + || tempChar == '/' || tempChar == '$') { + // + isInInteger = true; + // ? + + if (tempNumber > 0) { + inversePolandExpression.add(String.valueOf(tempNumber)); + } + + // 0 + tempNumber = 0; + // ??? + if ((tempChar == '+') || (tempChar == '-') + || tempChar == '$') { + + while (!signStackList.get(level).isEmpty()) { + // + inversePolandExpression.add(signStackList + .get(level).pop()); + } + } + // ? + + signStackList.get(level).push(tempChar + ""); + + } else if (tempChar == '(') { + signStack = new Stack(); + signStackList.add(signStack); + level++; + } else if (tempChar == ')') { + // + isInInteger = true; + // ? + + + if (tempNumber > 0) { + inversePolandExpression.add(String.valueOf(tempNumber)); + } + + // 0 + tempNumber = 0; + // ??? + + while (!signStackList.get(level).isEmpty()) { + // + inversePolandExpression.add(signStackList.get(level) + .pop()); + + } + level--; + } + } +// System.out.println(inversePolandExpression); + + return inversePolandExpression; + } + + static private List getInversePolandExpression2( + String normalExpression) { + List inversePolandExpression = new ArrayList(); + char[] normalChararray = (normalExpression + "$").toCharArray(); + // + Stack signStack = new Stack(); + List> signStackList = new ArrayList>(); + signStackList.add(signStack); + // + int level = 0; + + int pointPosition = 0; + double tempNumber = 0; + boolean isInInteger = true; + + for (int i = 0; i < normalChararray.length; i++) { + char tempChar = normalChararray[i]; + // + if (tempChar >= '0' && tempChar <= '9') { + // + if (isInInteger) { + tempNumber = tempNumber * 10 + (int) (tempChar - 48); + } + // ? + else { + tempNumber += (double) (tempChar - 48) + * Math.pow(0.1, i - pointPosition); + } + + } + // ? 
+ else if (tempChar == '.') { + isInInteger = false; + pointPosition = i; + } + // + else if (tempChar == '+' || tempChar == '-' || tempChar == '*' + || tempChar == '/' || tempChar == '$') { + // + isInInteger = true; + // ? + if (tempNumber > 0) { + inversePolandExpression.add(String.valueOf(tempNumber)); + } + // 0 + tempNumber = 0; + // ??? + if ((tempChar == '+') || (tempChar == '-') + || tempChar == '$') { + + while (!signStackList.get(level).isEmpty()) { + // + inversePolandExpression.add(signStackList + .get(level).pop()); + } + } + // ? + + signStackList.get(level).push(tempChar + ""); + + } else if (tempChar == '(') { + signStack = new Stack(); + signStackList.add(signStack); + level++; + } else if (tempChar == ')') { + // + isInInteger = true; + // ? + if (tempNumber > 0) { + inversePolandExpression.add(String.valueOf(tempNumber)); + } + + // 0 + tempNumber = 0; + // ??? + + while (!signStackList.get(level).isEmpty()) { + // + inversePolandExpression.add(signStackList.get(level) + .pop()); + + } + level--; + } + } +// System.out.println(inversePolandExpression); + return inversePolandExpression; + } + + + static private double simpleCalculate(double x, double y, String sign) { + double result = 0; + if (sign.equals("+")) { + result = x + y; + } else if (sign.equals("-")) { + result = x - y; + } else if (sign.equals("*")) { + result = x * y; + } else if (sign.equals("/")) { + result = x / y; + } + return result; + + } + +} diff --git a/src/au/edu/qut/xes/helpers/XESLogUtils.java b/src/au/edu/qut/xes/helpers/XESLogUtils.java new file mode 100644 index 0000000..a946d33 --- /dev/null +++ b/src/au/edu/qut/xes/helpers/XESLogUtils.java @@ -0,0 +1,41 @@ +package au.edu.qut.xes.helpers; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.deckfour.xes.classification.XEventClassifier; +import 
org.deckfour.xes.classification.XEventNameClassifier; +import org.deckfour.xes.model.XLog; + +public class XESLogUtils { + + private static final Logger LOGGER = LogManager.getLogger(); + + public static XEventClassifier detectNameBasedClassifier(XLog log) { + LOGGER.debug( "Detected classifiers: {} ", log.getClassifiers()); + XEventClassifier nameClassifier = new XEventNameClassifier(); + Set nameKeys = new HashSet(); + Collections.addAll(nameKeys, nameClassifier.getDefiningAttributeKeys()); + + for (XEventClassifier classifier: log.getClassifiers()) { + Set classifierKeys = new HashSet(); + Collections.addAll(classifierKeys, classifier.getDefiningAttributeKeys()); + if (nameKeys.equals(classifierKeys)){ + return classifier; + } + } + for (XEventClassifier classifier: log.getClassifiers()) { + for (String attrKey: classifier.getDefiningAttributeKeys()) { + if (nameKeys.contains(attrKey)) { + return classifier; + } + } + } + return nameClassifier; + } + + +} diff --git a/src/entropic/AbstractEntropyMeasure.java b/src/entropic/AbstractEntropyMeasure.java new file mode 100644 index 0000000..d341391 --- /dev/null +++ b/src/entropic/AbstractEntropyMeasure.java @@ -0,0 +1,174 @@ +package entropic; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; + +import org.deckfour.xes.model.XLog; +import org.jbpt.petri.NetSystem; + +import dk.brics.automaton.Automaton; +import entropic.PetriNetChecker; +import entropic.Utils; +import gnu.trove.map.TObjectShortMap; +import gnu.trove.map.custom_hash.TObjectShortCustomHashMap; +import gnu.trove.strategy.HashingStrategy; + +/** + * Calculating entropy for one model + * + * @author akalenkova + * + */ +public abstract class AbstractEntropyMeasure { + + protected Collection limitations = new ArrayList(); + + private Boolean limitationsHold = null; + private Set violetedLimitations = new HashSet(); + + protected Object 
model = null; + + private Long measureComputationTime = null; + private double measureValue = 0.0; + + private HashMap timesOfLimitationChecks = new HashMap(); + private HashMap resultsOfLimitationChecks = new HashMap(); + + public AbstractEntropyMeasure(Object model) { + this.model = model; + + this.initializeLimitations(); + } + + // initialize limitations of this method + protected abstract void initializeLimitations(); + + public Collection getLimitations() { + return Collections.unmodifiableCollection(this.limitations); + } + + /** + * Check if the given models satisfy limitations of this measure. + * + * @return true if the limitations are satisfied by the model; false otherwise. + */ + public boolean checkLimitations() { + long start = 0; + for (EntropyMeasureLimitation limitation : this.limitations) { + switch (limitation) { + case BOUNDED: + start = System.currentTimeMillis(); + boolean limitationHolds = this.checkBounded(this.model); + if (!limitationHolds) { + violetedLimitations.add(limitation.getDescription()); + } + this.resultsOfLimitationChecks.put(EntropyMeasureLimitation.BOUNDED, + new Boolean(limitationHolds)); + this.timesOfLimitationChecks.put(EntropyMeasureLimitation.BOUNDED, + new Long(System.currentTimeMillis() - start)); + break; + } + } + + boolean hold = true; + for (Boolean b : this.resultsOfLimitationChecks.values()) { + hold &= b.booleanValue(); + } + + if (hold) + this.limitationsHold = Boolean.TRUE; + else + this.limitationsHold = Boolean.FALSE; + + return hold; + } + + private boolean checkBounded(Object model) { + if (model instanceof NetSystem) { + NetSystem sys = (NetSystem) model; + PetriNetChecker netChecker = new PetriNetChecker(sys); + return netChecker.isBounded(); +// sys.loadNaturalMarking(); +// LoLA2ModelChecker lola = new LoLA2ModelChecker("./lola2/win/lola"); +// boolean result = lola.isBounded(sys); +// return result; + } + return true; + } + + /** + * Compute value of this measure. 
+ * + * @return Value of entropy for the given model. + * @throws Exception if limitations of this measure are not satisfied by the given models. + */ + public double computeMeasure() throws Exception { + + if (limitationsHold!=null && !limitationsHold.booleanValue()) { + throw new Exception(String.format("Limitation(s): %s of %s measure are not fulfilled", violetedLimitations, this.getClass().getName())); + } + + HashingStrategy strategy = new HashingStrategy() { + + public int computeHashCode(String object) { + return object.hashCode(); + } + + public boolean equals(String o1, String o2) { + return o1.equals(o2); + } + }; + TObjectShortMap activity2short = new TObjectShortCustomHashMap(strategy, 10, 0.5f, (short) -1); + + System.out.println(String.format("Constructing automaton for retrieved model")); + long start = System.currentTimeMillis(); + if (model instanceof NetSystem) { + model = Utils.constructAutomatonFromNetSystem((NetSystem) model, activity2short); + } else if (model instanceof XLog){ + model = Utils.constructAutomatonFromLog((XLog) model, activity2short); + } + long finish = System.currentTimeMillis(); + System.out.println(String.format("The automaton for model constructed in %s ms.", (finish-start))); + System.out.println(String.format("The number of states: %s", ((Automaton)model).getNumberOfStates())); + System.out.println(String.format("The number of transitions: %s", Utils.numberOfTransitions((Automaton)model))); + + start = System.nanoTime(); + this.measureValue = this.computeMeasureValue(); + this.measureComputationTime = System.nanoTime()-start; + + return this.measureValue; + } + + /** + * Get measure computation time (in nanoseconds). + * + * @return Time spent computing the measure; null if the measure was not computed. 
+ */ + public Long getMeasureComputationTime() { + return measureComputationTime; + } + + public Long getLimitationCheckTime(EntropyMeasureLimitation limitation) { + return this.timesOfLimitationChecks.get(limitation); + } + + public Boolean getLimitationCheckResult(EntropyMeasureLimitation limitation) { + return this.resultsOfLimitationChecks.get(limitation); + } + + /** + * Get value of this measure. + * + * @return Value of this measure; null if the measure was not computed. + */ + public double getMeasureValue() { + return measureValue; + } + + // compute measure value + protected abstract double computeMeasureValue(); +} diff --git a/src/entropic/AbstractQualityMeasure.java b/src/entropic/AbstractQualityMeasure.java new file mode 100644 index 0000000..6730532 --- /dev/null +++ b/src/entropic/AbstractQualityMeasure.java @@ -0,0 +1,207 @@ +package entropic; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; + +import org.apache.commons.math3.util.Pair; +import org.deckfour.xes.model.XLog; +import org.jbpt.petri.NetSystem; + +import dk.brics.automaton.Automaton; +import entropic.PetriNetChecker; +import entropic.Utils; +import gnu.trove.map.TObjectShortMap; +import gnu.trove.map.custom_hash.TObjectShortCustomHashMap; +import gnu.trove.strategy.HashingStrategy; + +/** + * Abstract Quality Measure Class + * + * @author Artem Polyvyanyy + * + * @author Anna Kalenkova + */ +public abstract class AbstractQualityMeasure { + + protected Collection limitations = new ArrayList(); + + private Boolean limitationsHold = null; + private Set violetedLimitations = new HashSet(); + + protected Object relevantTraces = null; + protected Object retrievedTraces = null; + + protected boolean bPrecision = true; + protected boolean bRecall = true; + protected boolean bSilent = false; + + private Long measureComputationTime = null; + private Pair measureValue = null; + + 
private HashMap timesOfLimitationChecks = new HashMap(); + private HashMap resultsOfLimitationChecks = new HashMap(); + + public AbstractQualityMeasure(Object relevantTraces, Object retrievedTraces, boolean bPrecision, boolean bRecall, boolean bSilent) { + this.relevantTraces = relevantTraces; + this.retrievedTraces = retrievedTraces; + this.bPrecision = bPrecision; + this.bRecall = bRecall; + this.bSilent = bSilent; + + this.initializeLimitations(); + } + + // initialize limitations of this method + protected abstract void initializeLimitations(); + + public Collection getLimitations() { + return Collections.unmodifiableCollection(this.limitations); + } + + /** + * Check if the given models satisfy limitations of this measure. + * + * @return true if the limitations are satisfied by the model; false otherwise. + */ + public boolean checkLimitations() { + long start = 0; + for (QualityMeasureLimitation limitation : this.limitations) { + switch (limitation) { + case RETRIEVED_BOUNDED: + start = System.currentTimeMillis(); + boolean limitationHolds = this.checkBounded(this.retrievedTraces); + if (!limitationHolds) { + violetedLimitations.add(limitation.getDescription()); + } + this.resultsOfLimitationChecks.put(QualityMeasureLimitation.RETRIEVED_BOUNDED, + new Boolean(limitationHolds)); + this.timesOfLimitationChecks.put(QualityMeasureLimitation.RETRIEVED_BOUNDED, + new Long(System.currentTimeMillis() - start)); + break; + case RELEVANT_BOUNDED: + start = System.currentTimeMillis(); + limitationHolds = this.checkBounded(this.relevantTraces); + if (!limitationHolds) { + violetedLimitations.add(limitation.getDescription()); + } + this.resultsOfLimitationChecks.put(QualityMeasureLimitation.RELEVANT_BOUNDED, + new Boolean(limitationHolds)); + this.timesOfLimitationChecks.put(QualityMeasureLimitation.RELEVANT_BOUNDED, + new Long(System.currentTimeMillis() - start)); + break; + } + } + + boolean hold = true; + for (Boolean b : this.resultsOfLimitationChecks.values()) { + 
hold &= b.booleanValue(); + } + + if (hold) + this.limitationsHold = Boolean.TRUE; + else + this.limitationsHold = Boolean.FALSE; + + return hold; + } + + public static boolean checkBounded(Object model) { + if (model instanceof NetSystem) { + NetSystem sys = (NetSystem) model; + PetriNetChecker netChecker = new PetriNetChecker(sys); + return netChecker.isBounded(); +// sys.loadNaturalMarking(); +// LoLA2ModelChecker lola = new LoLA2ModelChecker("./lola2/win/lola"); +// boolean result = lola.isBounded(sys); +// return result; + } + return true; + } + + /** + * Compute value of this measure. + * + * @return Value of this measure for the given models of relevant and retrieved traces. + * @throws Exception if limitations of this measure are not satisfied by the given models. + */ + public Pair computeMeasure() throws Exception { + + if (limitationsHold!=null && !limitationsHold.booleanValue()) { + throw new Exception(String.format("Limitation(s): %s of %s measure are not fulfilled", violetedLimitations, this.getClass().getName())); + } + + HashingStrategy strategy = new HashingStrategy() { + + public int computeHashCode(String object) { + return object.hashCode(); + } + + public boolean equals(String o1, String o2) { + return o1.equals(o2); + } + }; + TObjectShortMap activity2short = new TObjectShortCustomHashMap(strategy, 10, 0.5f, (short) -1); + + System.out.println(String.format("Constructing automaton RET that encodes the retrieved model.")); + long start = System.currentTimeMillis(); + if (retrievedTraces instanceof NetSystem) { + retrievedTraces = Utils.constructAutomatonFromNetSystem((NetSystem) retrievedTraces, activity2short); + } else if (retrievedTraces instanceof XLog){ + retrievedTraces = Utils.constructAutomatonFromLog((XLog) retrievedTraces, activity2short); + } + long finish = System.currentTimeMillis(); + System.out.println(String.format("Automaton RET constructed in %s ms.", (finish-start))); + System.out.println(String.format("Automaton RET has %s 
states and %s transitions.", ((Automaton)retrievedTraces).getNumberOfStates(), Utils.numberOfTransitions((Automaton)retrievedTraces))); + + System.out.println(String.format("Constructing automaton REL that encodes the relevant model.")); + start = System.currentTimeMillis(); + if (relevantTraces instanceof NetSystem) { + relevantTraces = Utils.constructAutomatonFromNetSystem((NetSystem) relevantTraces, activity2short); + } else if (relevantTraces instanceof XLog){ + relevantTraces = Utils.constructAutomatonFromLog((XLog) relevantTraces, activity2short); + } + finish = System.currentTimeMillis(); + System.out.println(String.format("Automaton REL constructed in %s ms.", (finish-start))); + System.out.println(String.format("Automaton REL has %s states and %s transitions.", ((Automaton)relevantTraces).getNumberOfStates(), Utils.numberOfTransitions((Automaton)relevantTraces))); + + start = System.nanoTime(); + this.measureValue = this.computeMeasureValue(); + this.measureComputationTime = System.nanoTime()-start; + + return this.measureValue; + } + + /** + * Get measure computation time (in nanoseconds). + * + * @return Time spent computing the measure; null if the measure was not computed. + */ + public Long getMeasureComputationTime() { + return measureComputationTime; + } + + public Long getLimitationCheckTime(QualityMeasureLimitation limitation) { + return this.timesOfLimitationChecks.get(limitation); + } + + public Boolean getLimitationCheckResult(QualityMeasureLimitation limitation) { + return this.resultsOfLimitationChecks.get(limitation); + } + + /** + * Get value of this measure. + * + * @return Value of this measure; null if the measure was not computed. 
+ */ + public Pair getMeasureValue() { + return measureValue; + } + + // compute measure value + protected abstract Pair computeMeasureValue(); + +} diff --git a/src/entropic/AbstractStochasticNetSemantics.java b/src/entropic/AbstractStochasticNetSemantics.java new file mode 100644 index 0000000..c59a30c --- /dev/null +++ b/src/entropic/AbstractStochasticNetSemantics.java @@ -0,0 +1,151 @@ +package entropic; + + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; + +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.elements.Arc; +import org.processmining.models.graphbased.directed.petrinet.elements.InhibitorArc; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.ResetArc; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetExecutionInformation; + +public class AbstractStochasticNetSemantics { + + protected Marking state; + private Collection transitions; + + public AbstractStochasticNetSemantics() { + this(null); + } + + public AbstractStochasticNetSemantics(Marking state) { + this.state = state; + + } + + public void initialize(Collection transitions, Marking state) { + this.transitions = transitions; + setCurrentState(state); + } + + protected Collection getTransitions() { + return Collections.unmodifiableCollection(transitions); + } + + protected boolean isEnabled(Marking state, Marking required, TimedTransition t) { + if (required.isLessOrEqual(state)) { + for (PetrinetEdge e : t.getGraph().getInEdges(t)) { + if (e instanceof InhibitorArc) { + 
InhibitorArc arc = (InhibitorArc) e; + if (state.occurrences(arc.getSource()) > 0) { + return false; + } + } + } + return true; + } + return false; + } + + public Marking getCurrentState() { + return state; + } + + public void setCurrentState(Marking currentState) { + state = currentState; + } + + public PetrinetExecutionInformation executeExecutableTransition(TimedTransition toExecute) + throws IllegalTransitionException { + Marking required = getRequired(toExecute); + Marking newState = new Marking(state); + if (!isEnabled(state, required, toExecute)) { + throw new IllegalTransitionException(toExecute, newState); + } + Marking produced = getProduced(toExecute); + newState.addAll(produced); + Marking toRemove = getRemoved(toExecute); + newState.removeAll(toRemove); + state = newState; + return new PetrinetExecutionInformation(required, toRemove, produced, toExecute); + } + + public Collection getExecutableTransitions() { + if (state == null) { + return null; + } + // the tokens are divided over the places according to state + Collection enabled = new ArrayList(); + for (TimedTransition trans : getTransitions()) { + + if (isEnabled(state, getRequired(trans), trans)) { + enabled.add(trans); + } + } + return enabled; + } + + protected Marking getRequired(TimedTransition trans) { + Collection> edges = trans.getGraph().getInEdges( + trans); + Marking required = new Marking(); + for (PetrinetEdge e : edges) { + if (e instanceof Arc) { + Arc arc = (Arc) e; + required.add((Place) arc.getSource(), arc.getWeight()); + } + } + return required; + + } + + protected Marking getProduced(TimedTransition trans) { + Collection> edges = trans.getGraph().getOutEdges( + trans); + Marking produced = new Marking(); + for (PetrinetEdge e : edges) { + if (e instanceof Arc) { + Arc arc = (Arc) e; + produced.add((Place) arc.getTarget(), arc.getWeight()); + } + } + + return produced; + + } + + protected Marking getRemoved(TimedTransition trans) { + Collection> edges = 
trans.getGraph().getInEdges( + trans); + Marking removed = new Marking(getRequired(trans)); + for (PetrinetEdge e : edges) { + if (e instanceof ResetArc) { + ResetArc arc = (ResetArc) e; + removed.add(arc.getSource(), state.occurrences(arc.getSource())); + } + } + return removed; + } + + public String toString() { + return "Regular Semantics"; + } + + public int hashCode() { + return getClass().hashCode(); + } + + public boolean equals(Object o) { + if (o == null) { + return false; + } + return this.getClass().equals(o.getClass()); + } +} diff --git a/src/entropic/CalculateEntropicRelevancePlugin.java b/src/entropic/CalculateEntropicRelevancePlugin.java new file mode 100644 index 0000000..5d18f4c --- /dev/null +++ b/src/entropic/CalculateEntropicRelevancePlugin.java @@ -0,0 +1,153 @@ +package entropic; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; + +import org.deckfour.xes.model.XLog; +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.slpnminer.models.StochasticNetImpl; +import org.processmining.slpnminer.models.StochasticPetrinetSemantics; +import org.processmining.slpnminer.models.StochasticPetrinetSemanticsFactory; +import 
org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.State; + + +@Plugin(name = "Compute Entropic Relevance (log-model)", + returnLabels = { "entropic relevance value" }, + returnTypes = { String.class}, + parameterLabels = { "slpn", "log"}) +public class CalculateEntropicRelevancePlugin { + + private static final int MAXSTATES = 25000; + + private HashMap stateToId= null; + + private Integer idx; + + @UITopiaVariant(affiliation = UITopiaVariant.EHV, author = "Tian Li", email = "t.li@bpm.rwth-aachen.de", pack = "WeightEstimation") + @PluginVariant(requiredParameterLabels = {0, 1}) + public String computeER(PluginContext context, XLog log, StochasticNetImpl net){ + Marking initialMarking = guessInitialMarking(net); + + stateToId = new HashMap(); + idx = 0; + StochasticPetrinetSemantics semantics = StochasticPetrinetSemanticsFactory.stochasticNetSemantics(StochasticNetImpl.class); + semantics.initialize(net.getTimedTransitions(), initialMarking); + + ReachabilityGraph ts = doBreadthFirst( + net.getLabel(), + initialMarking, + semantics, + MAXSTATES); + + Integer initialState = 0; + List saTransitions = getSATransitions(ts); + + SAutomaton sa = new SAutomaton(saTransitions, initialState); + Map relevance = Relevance.compute(log, sa, false); + Double invertedER = (Double)relevance.get("relevance"); +// return (String) relevance.get("relevance"); + return String.valueOf(1/invertedER); + } + + private List getSATransitions(ReachabilityGraph ts) { + List lsat = new ArrayList(); + + for(State state: ts.getNodes()) { + // for each outgoing edges, get the probability + Double totalOutWeights = 0.0; + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + totalOutWeights = totalOutWeights + t.getWeight(); + } + + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + SATransition trans = new SATransition( + 
stateToId.get(state.getIdentifier()), + stateToId.get(t.getTarget().getIdentifier()), t.getLabel(), + t.getWeight()/totalOutWeights); + System.out.print("weight: "+ t.getLabel() +" "+t.getWeight()/totalOutWeights); + + lsat.add(trans); + } + } + return lsat; + } + + private Marking guessInitialMarking(StochasticNet net) { + Marking result = new Marking(); + for (Place p : net.getPlaces()) { + if (net.getInEdges(p).isEmpty()) { + result.add(p); + } + } + return result; + } + + + private ReachabilityGraph doBreadthFirst( + String label, Marking state, + Semantics semantics, + int max) { + + ReachabilityGraph ts = new ReachabilityGraph("StateSpace of " + label); + ts.addState(state); + stateToId.put(state,idx); + Queue newStates = new LinkedList(); + newStates.add(state); + do { + newStates.addAll(extend(ts, newStates.poll(), semantics)); + } while (!newStates.isEmpty() && (ts.getStates().size() < max)); + if (!newStates.isEmpty()) { + // This net has been shown to be unbounded on this marking + return null; + } + return ts; + } + + private Set extend(ReachabilityGraph ts, + Marking state, + Semantics semantics) { + Set newStates = new HashSet(); + semantics.setCurrentState(state); + for (TimedTransition t : semantics.getExecutableTransitions()) { + semantics.setCurrentState(state); + try { + semantics.executeExecutableTransition(t); + } catch (IllegalTransitionException e) { + assert (false); + } + Marking newState = semantics.getCurrentState(); + + if (ts.addState(newState)) { + newStates.add(newState); + stateToId.put(newState,idx++); + } + ts.addTransition(state, newState, t.getLocalID(), t.getLabel(), t.isInvisible(), t.getWeight()); + semantics.setCurrentState(state); + } + return newStates; + } + + public static void main(String[] args) throws Exception { + XLog log = XLogReader.openLog("/Applications/Programming/Artem-Entropic-Relevance/jbpt-pm/examples/log2.xes"); + + SAutomaton sa = 
SAutomaton.readJSON("/Applications/Programming/Artem-Entropic-Relevance/jbpt-pm/examples/automaton.sdfa"); + Map relevance = Relevance.compute(log, sa, false); + System.out.println(1/(Double)relevance.get("relevance")); + } +} \ No newline at end of file diff --git a/src/entropic/CalculateEntropicRelevanceReachability.java b/src/entropic/CalculateEntropicRelevanceReachability.java new file mode 100644 index 0000000..9b049b4 --- /dev/null +++ b/src/entropic/CalculateEntropicRelevanceReachability.java @@ -0,0 +1,147 @@ +package entropic; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; + +import org.deckfour.xes.model.XLog; +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.slpnminer.models.StochasticNetImpl; +import org.processmining.slpnminer.models.StochasticPetrinetSemantics; +import org.processmining.slpnminer.models.StochasticPetrinetSemanticsFactory; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.State; + + +@Plugin(name = "Compute Entropic Relevance stochastic reachability", + returnLabels = { "entropic relevance value" }, + returnTypes = { 
ReachabilityGraph.class}, + parameterLabels = { "slpn", "log"}) +public class CalculateEntropicRelevanceReachability { + + private static final int MAXSTATES = 25000; + + private HashMap stateToId= null; + + private Integer idx; + + @UITopiaVariant(affiliation = UITopiaVariant.EHV, author = "Tian Li", email = "t.li@bpm.rwth-aachen.de", pack = "WeightEstimation") + @PluginVariant(requiredParameterLabels = {0, 1}) + public ReachabilityGraph computeER(PluginContext context, XLog log, StochasticNetImpl net){ + Marking initialMarking = guessInitialMarking(net); + + stateToId = new HashMap(); + idx = 0; + StochasticPetrinetSemantics semantics = StochasticPetrinetSemanticsFactory.stochasticNetSemantics(StochasticNetImpl.class); + semantics.initialize(net.getTimedTransitions(), initialMarking); + + + ReachabilityGraph ts = doBreadthFirst( + net.getLabel(), + initialMarking, + semantics, + MAXSTATES); + + return ts; + } + + private List getSATransitions(ReachabilityGraph ts) { + List lsat = new ArrayList(); + + for(State state: ts.getNodes()) { + // for each outgoing edges, get the probability + Double totalOutWeights = 0.0; + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + totalOutWeights = totalOutWeights + t.getWeight(); + } + + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + SATransition trans = new SATransition( + stateToId.get(state.getIdentifier()), + stateToId.get(t.getTarget().getIdentifier()), t.getLabel(), + t.getWeight()/totalOutWeights); + System.out.print("weight: "+ t.getLabel() +" "+ + t.getWeight()/totalOutWeights); + lsat.add(trans); + } + } + return lsat; + } + + private Marking guessInitialMarking(StochasticNet net) { + Marking result = new Marking(); + for (Place p : net.getPlaces()) { + if (net.getInEdges(p).isEmpty()) { + result.add(p); + } + } + return result; + } + + + private ReachabilityGraph doBreadthFirst( + String label, Marking state, + 
Semantics semantics, + int max) { + + ReachabilityGraph ts = new ReachabilityGraph("StateSpace of " + label); + ts.addState(state); + stateToId.put(state,idx); + Queue newStates = new LinkedList(); + newStates.add(state); + do { + newStates.addAll(extend(ts, newStates.poll(), semantics)); + } while (!newStates.isEmpty() && (ts.getStates().size() < max)); + if (!newStates.isEmpty()) { + // This net has been shown to be unbounded on this marking + return null; + } + return ts; + } + + private Set extend(ReachabilityGraph ts, + Marking state, + Semantics semantics) { + Set newStates = new HashSet(); + semantics.setCurrentState(state); + for (TimedTransition t : semantics.getExecutableTransitions()) { + semantics.setCurrentState(state); + try { + semantics.executeExecutableTransition(t); + } catch (IllegalTransitionException e) { + assert (false); + } + Marking newState = semantics.getCurrentState(); + + if (ts.addState(newState)) { + newStates.add(newState); + stateToId.put(newState,idx++); + } + ts.addTransition(state, newState, t.getLocalID(), t.getLabel(), t.isInvisible(), t.getWeight()); + semantics.setCurrentState(state); + } + return newStates; + } + + public static void main(String[] args) throws Exception { + XLog log = XLogReader.openLog("/Applications/Programming/Artem-Entropic-Relevance/jbpt-pm/examples/log2.xes"); + + SAutomaton sa = SAutomaton.readJSON("/Applications/Programming/Artem-Entropic-Relevance/jbpt-pm/examples/automaton.sdfa"); + Map relevance = Relevance.compute(log, sa, false); + System.out.println(1/(Double)relevance.get("relevance")); + } +} \ No newline at end of file diff --git a/src/entropic/EntropyMeasure.java b/src/entropic/EntropyMeasure.java new file mode 100644 index 0000000..6b92e13 --- /dev/null +++ b/src/entropic/EntropyMeasure.java @@ -0,0 +1,34 @@ +package entropic; + +import org.processmining.eigenvalue.MetricsCalculator; + +import dk.brics.automaton.Automaton; + +public class EntropyMeasure extends AbstractEntropyMeasure { + 
int numberOfSkips = 0; + + public EntropyMeasure(Object model, int numberOfSkips) { + + super(model); + this.numberOfSkips = numberOfSkips; + } + + @Override + protected void initializeLimitations() { + this.limitations.add(EntropyMeasureLimitation.BOUNDED); + } + + @Override + public double computeMeasureValue() { + System.out.println(); + System.out.println("===================Calculating entropy============================="); + System.out.println(); + + if (model instanceof Automaton) { + double value = MetricsCalculator.calculateEntropy((Automaton)model, "model", false, false, numberOfSkips); + return value; + } + return 0.0; + } +} diff --git a/src/entropic/EntropyMeasureLimitation.java b/src/entropic/EntropyMeasureLimitation.java new file mode 100644 index 0000000..b99f836 --- /dev/null +++ b/src/entropic/EntropyMeasureLimitation.java @@ -0,0 +1,28 @@ +package entropic; + +public enum EntropyMeasureLimitation { + + BOUNDED(1, "The boundness of the model"); + + + private final int limitation; + private final String description; + + EntropyMeasureLimitation(int limitation, String description) { + this.limitation = limitation; + this.description = description; + } + + /** + * Get code of this entropy measure limitation. + * + * @return Code of this entropy measure limitation. 
+ */ + public int getEntropyMeasureLimitationCode() { + return this.limitation; + } + + public String getDescription() { + return this.description; + } +} diff --git a/src/entropic/EntropyPrecisionRecallMeasure.java b/src/entropic/EntropyPrecisionRecallMeasure.java new file mode 100644 index 0000000..a717a12 --- /dev/null +++ b/src/entropic/EntropyPrecisionRecallMeasure.java @@ -0,0 +1,43 @@ +package entropic; + +import org.apache.commons.math3.util.Pair; +import org.processmining.eigenvalue.MetricsCalculator; + +import dk.brics.automaton.Automaton; + +public class EntropyPrecisionRecallMeasure extends AbstractQualityMeasure { + + int skipsRel = 0; // max allowed number of skips in relevant traces + int skipsRet = 0; // max allowed number of skips in retrieved traces + + public EntropyPrecisionRecallMeasure(Object relevantTraces, Object retrievedTraces, int skipsRel, int skipsRet, + boolean bPrecision, boolean bRecall, boolean bSilent) { + super(relevantTraces, retrievedTraces, bPrecision, bRecall, bSilent); + + this.skipsRel = skipsRel; + this.skipsRet = skipsRet; + } + + @Override + protected void initializeLimitations() { + this.limitations.add(QualityMeasureLimitation.RETRIEVED_BOUNDED); + this.limitations.add(QualityMeasureLimitation.RELEVANT_BOUNDED); + } + + @Override + protected Pair computeMeasureValue() { + System.out.println(); + System.out.println("===================Calculating precision and recall============================="); + System.out.println(); + + if ((relevantTraces instanceof Automaton) && (retrievedTraces instanceof Automaton)) { + System.out.println("Both are Automaton"); + + + Pair values = MetricsCalculator.calculate((Automaton)relevantTraces, "REL", (Automaton)retrievedTraces, "RET", false, false, + skipsRel, skipsRet, bPrecision, bRecall, bSilent); + return values; + } + return new Pair(0.0, 0.0); + } +} diff --git a/src/entropic/EventFrequencyBasedBackgroundModel.java b/src/entropic/EventFrequencyBasedBackgroundModel.java new file 
mode 100644 index 0000000..a6eac10 --- /dev/null +++ b/src/entropic/EventFrequencyBasedBackgroundModel.java @@ -0,0 +1,79 @@ +package entropic; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import org.deckfour.xes.model.XTrace; + +import entropic.SimpleBackgroundModel; + +public class EventFrequencyBasedBackgroundModel +extends SimpleBackgroundModel { + Map freqActionInLog = new HashMap(); + Map> freqActionInTrace = new HashMap>(); + Map tempFreqActionInLog; + boolean nonFittingSubLog; + int lengthOfLog = 0; + + public EventFrequencyBasedBackgroundModel(boolean nonFittingSubLog) { + this.nonFittingSubLog = nonFittingSubLog; + } + + @Override + public void openTrace(XTrace trace) { + super.openTrace(trace); + this.tempFreqActionInLog = new HashMap(); + } + + @Override + public void processEvent(String eventLabel, double probability) { + super.processEvent(eventLabel, probability); + if (!this.nonFittingSubLog) { + this.freqActionInLog.put(eventLabel, this.freqActionInLog.getOrDefault(eventLabel, 0) + 1); + } + this.tempFreqActionInLog.put(eventLabel, this.tempFreqActionInLog.getOrDefault(eventLabel, 0) + 1); + } + + @Override + public void closeTrace(XTrace trace, boolean fitting, Optional finalStateProb) { + super.closeTrace(trace, fitting, finalStateProb); + if (!this.freqActionInTrace.containsKey(this.largeString)) { + this.freqActionInTrace.put(this.largeString, this.tempFreqActionInLog); + } + if (this.nonFittingSubLog && !fitting) { + for (Map.Entry eventLabel : this.tempFreqActionInLog.entrySet()) { + this.freqActionInLog.put(eventLabel.getKey(), this.freqActionInLog.getOrDefault(eventLabel.getKey(), 0) + eventLabel.getValue()); + } + } + } + + protected int actionsInLog(Map freqActionInLog) { + return freqActionInLog.values().stream().mapToInt(i -> i).sum() + this.lengthOfLog; + } + + protected double p(String element, Map freqActionInLog) { + return (double)freqActionInLog.get(element).intValue() / 
(double)this.actionsInLog(freqActionInLog); + } + + @Override + protected double costBitsUnfittingTraces(String traceId) { + double bits = 0.0; + this.lengthOfLog = this.nonFittingSubLog ? this.totalNumberOfNonFittingTraces : this.totalNumberOfTraces; + for (Map.Entry eventFrequency : this.freqActionInTrace.get(traceId).entrySet()) { + bits -= EventFrequencyBasedBackgroundModel.log2(this.p(eventFrequency.getKey(), this.freqActionInLog)) * (double)eventFrequency.getValue().intValue(); + } + return bits -= EventFrequencyBasedBackgroundModel.log2((double)this.lengthOfLog / (double)this.actionsInLog(this.freqActionInLog)); + } + + @Override + protected double costFrequencyDistribution() { + double bits = 0.0; + this.lengthOfLog = this.nonFittingSubLog ? this.totalNumberOfNonFittingTraces : this.totalNumberOfTraces; + for (String label : this.labels) { + bits += 2.0 * Math.floor(EventFrequencyBasedBackgroundModel.log2(this.freqActionInLog.getOrDefault(label, 0) + 1)) + 1.0; + } + return bits += 2.0 * Math.floor(EventFrequencyBasedBackgroundModel.log2(this.lengthOfLog + 1)) + 1.0; + } +} + + \ No newline at end of file diff --git a/src/entropic/FDAGArc.java b/src/entropic/FDAGArc.java new file mode 100644 index 0000000..3caf611 --- /dev/null +++ b/src/entropic/FDAGArc.java @@ -0,0 +1,34 @@ +package entropic; + +public class FDAGArc { + private Integer from; + private Integer to; + private Integer freq; + + public Integer getFrom() { + return this.from; + } + + public Integer getTo() { + return this.to; + } + + public Integer getFreq() { + return this.freq; + } + + public FDAGArc(Integer from, Integer to, Integer freq) { + this.from = from; + this.to = to; + this.freq = freq; + } + + public String toString() { + return String.format("(%d) - [%d] -> (%d)", this.from, this.freq, this.to); + } + + public String toDot() { + return String.format("\tn%d -> n%d [label=\"%d\"];", this.from, this.to, this.freq); + } +} + diff --git a/src/entropic/FDAGNode.java 
b/src/entropic/FDAGNode.java new file mode 100644 index 0000000..b7acf67 --- /dev/null +++ b/src/entropic/FDAGNode.java @@ -0,0 +1,34 @@ +package entropic; + +public class FDAGNode { + private Integer id; + private String label; + private Integer freq; + + public Integer getId() { + return this.id; + } + + public String getLabel() { + return this.label; + } + + public Integer getFreq() { + return this.freq; + } + + public FDAGNode(Integer id, String label, Integer freq) { + this.id = id; + this.label = label; + this.freq = freq; + } + + public String toString() { + return "{id=" + this.id + ", label='" + this.label + '\'' + ", freq=" + this.freq + '}'; + } + + public String toDot() { + return String.format("\tn%d [label=\"%s\\n%d\"];", this.id, this.label, this.freq); + } +} + diff --git a/src/entropic/FDAGraph.java b/src/entropic/FDAGraph.java new file mode 100644 index 0000000..e0dad5b --- /dev/null +++ b/src/entropic/FDAGraph.java @@ -0,0 +1,46 @@ +package entropic; + + +import com.google.gson.Gson; + +import entropic.FDAGArc; +import entropic.FDAGNode; + +import java.io.FileReader; +import java.io.PrintStream; +import java.io.Reader; +import java.util.List; + +public class FDAGraph { + private List nodes; + private List arcs; + + public List getNodes() { + return this.nodes; + } + + public List getArcs() { + return this.arcs; + } + + public void toDot(PrintStream out) { + out.println("digraph G {"); + this.nodes.forEach(n -> { + if (n != null) { + out.println(n.toDot()); + } + }); + this.arcs.forEach(a -> { + if (a != null) { + out.println(a.toDot()); + } + }); + out.println("}"); + } + + public static FDAGraph readJSON(String fileName) throws Exception { + Gson gson = new Gson(); + return gson.fromJson((Reader)new FileReader(fileName), FDAGraph.class); + } +} + diff --git a/src/entropic/IModelChecker.java b/src/entropic/IModelChecker.java new file mode 100644 index 0000000..73a5371 --- /dev/null +++ b/src/entropic/IModelChecker.java @@ -0,0 +1,123 @@ +package 
entropic; + +import java.util.Collection; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.jbpt.petri.IFlow; +import org.jbpt.petri.IMarking; +import org.jbpt.petri.INetSystem; +import org.jbpt.petri.INode; +import org.jbpt.petri.IPlace; +import org.jbpt.petri.ITransition; + +/** + * An interface to a model checker. + * + * @author Artem Polyvyanyy + */ +public interface IModelChecker, N extends INode, P extends IPlace, T extends ITransition, M extends IMarking> { + + /** + * Check if a transition in a given net system is live. + * + * @param sys A net system. + * @param t A transition. + * + * @return true if transition t is live in net system sys; false otherwise. + */ + public boolean isLive(INetSystem sys, T t); + + /** + * Check if a given net system is live. + * + * @param sys A net system. + * + * @return true if net system sys is live; false otherwise. + */ + public boolean isLive(INetSystem sys); + + /** + * Check if a marking is reachable in a given net system. + * The marking is specified as a collection of places, where the multiplicity of a place in the collection denotes the number of tokens at that place. + * + * @param sys A net system. + * @param marking A marking (specified as a collection of places). + * + * @return true if marking is reachable in net system sys; false otherwise. + */ + public boolean isReachable(INetSystem sys, Collection

marking); + + /** + * Check if a place in a given net system is bounded. + * + * @param sys A net system. + * @param p A place. + * + * @return true if place p is bounded in net system sys; false otherwise. + */ + public boolean isBounded(INetSystem sys, P p); + + /** + * Check if a given net system is bounded. + * + * @param sys A net system. + * + * @return true if net system sys is bounded; false otherwise. + */ + public boolean isBounded(INetSystem sys); + + /** + * Check if a given net system is a sound workflow net. + * + * @param sys A net system. + * + * @return true if net system sys is a sound workflow net; false otherwise. + */ + public boolean isSoundWorkflowNet(INetSystem sys); + + /** + * Check if a marking that put at least one token at each of the given places is reachable + * + * @param sys A net system. + * @param places A set of places. + * + * @return true if a marking that puts at least one token at each of the places in set places is reachable; false otherwise. + */ + public boolean canReachMarkingWithAtLeastOneTokenAtEachPlace(INetSystem sys, Set

places); + + /** + * Compute statistics for the state space (a.k.a reachability graph) of a given net system. + * + * @param sys A net system. + * + * @return {@link StateSpaceStatistics} object that contains information on the number of reachable states and state transitions of the reachability graph of net system sys. + */ + public StateSpaceStatistics getStateSpaceStatistics(INetSystem sys); + + /** + * Check a property in temporal logic (LTL or CTL). + * + * @param sys A net system. + * @param property An LTL or CTL property in the LoLA 2.0 format. + * + * @return true if property holds for net system sys; false otherwise. + */ + public boolean check(INetSystem sys, String property); + + + // TODO: The below methods are querying specific and must be removed from this interface. + + public boolean canReachMarkingWithAtLeastOneTokenAtEachPlace(INetSystem sys, Set

places, Set p); //A.P. + + public boolean isReachable(INetSystem sys, Collection

marking, Set p); //A.P. + + public boolean isIndexable(INetSystem sys); + + public boolean isIndexable(INetSystem sys, Set p); //A.P. + + public void setLoLAActive(boolean active); //A.P. + + public AtomicBoolean isLoLAActive(); //A.P. + +} diff --git a/src/entropic/PartialEfficientEntropyMeasure.java b/src/entropic/PartialEfficientEntropyMeasure.java new file mode 100644 index 0000000..30bb193 --- /dev/null +++ b/src/entropic/PartialEfficientEntropyMeasure.java @@ -0,0 +1,30 @@ +package entropic; + +import org.processmining.eigenvalue.MetricsCalculator; + +import dk.brics.automaton.Automaton; + +public class PartialEfficientEntropyMeasure extends AbstractEntropyMeasure { + + public PartialEfficientEntropyMeasure(Object model) { + super(model); + } + + @Override + protected void initializeLimitations() { + this.limitations.add(EntropyMeasureLimitation.BOUNDED); + } + + @Override + protected double computeMeasureValue() { + System.out.println(); + System.out.println("===================Calculating partial entropy efficiently============================="); + System.out.println(); + + if (model instanceof Automaton) { + double value = MetricsCalculator.calculateEntropy((Automaton)model, "model", true, true, 0); + return value; + } + return 0.0; + } +} \ No newline at end of file diff --git a/src/entropic/PartialEfficientEntropyPrecisionRecallMeasure.java b/src/entropic/PartialEfficientEntropyPrecisionRecallMeasure.java new file mode 100644 index 0000000..80c277e --- /dev/null +++ b/src/entropic/PartialEfficientEntropyPrecisionRecallMeasure.java @@ -0,0 +1,29 @@ +package entropic; + +import org.apache.commons.math3.util.Pair; +import org.processmining.eigenvalue.MetricsCalculator; + +import dk.brics.automaton.Automaton; + +public class PartialEfficientEntropyPrecisionRecallMeasure extends AbstractQualityMeasure { + + public PartialEfficientEntropyPrecisionRecallMeasure(Object relevantTraces, Object retrievedTraces, boolean bPrecision, boolean bRecall, boolean bSilent) 
{ + super(relevantTraces, retrievedTraces, bPrecision, bRecall, bSilent); + } + + @Override + protected void initializeLimitations() { + this.limitations.add(QualityMeasureLimitation.RELEVANT_BOUNDED); + this.limitations.add(QualityMeasureLimitation.RETRIEVED_BOUNDED); + } + + @Override + protected Pair computeMeasureValue() { + + if ((relevantTraces instanceof Automaton) && (retrievedTraces instanceof Automaton)) { + Pair values = MetricsCalculator.calculate((Automaton)relevantTraces, "REL", (Automaton)retrievedTraces, "RET", true, true, 0, 0, bPrecision, bRecall, bSilent); + return values; + } + return new Pair(0.0, 0.0); + } +} diff --git a/src/entropic/PartialEntropyMeasure.java b/src/entropic/PartialEntropyMeasure.java new file mode 100644 index 0000000..7b575ca --- /dev/null +++ b/src/entropic/PartialEntropyMeasure.java @@ -0,0 +1,30 @@ +package entropic; + +import org.processmining.eigenvalue.MetricsCalculator; + +import dk.brics.automaton.Automaton; + +public class PartialEntropyMeasure extends AbstractEntropyMeasure { + + public PartialEntropyMeasure(Object model) { + super(model); + } + + @Override + protected void initializeLimitations() { + this.limitations.add(EntropyMeasureLimitation.BOUNDED); + } + + @Override + protected double computeMeasureValue() { + System.out.println(); + System.out.println("===================Calculating partial entropy============================="); + System.out.println(); + + if (model instanceof Automaton) { + double value = MetricsCalculator.calculateEntropy((Automaton)model, "model", true, false, 0); + return value; + } + return 0.0; + } +} diff --git a/src/entropic/PartialEntropyPrecisionRecallMeasure.java b/src/entropic/PartialEntropyPrecisionRecallMeasure.java new file mode 100644 index 0000000..c8c4050 --- /dev/null +++ b/src/entropic/PartialEntropyPrecisionRecallMeasure.java @@ -0,0 +1,29 @@ +package entropic; + +import org.apache.commons.math3.util.Pair; +import org.processmining.eigenvalue.MetricsCalculator; + 
+import dk.brics.automaton.Automaton; + +public class PartialEntropyPrecisionRecallMeasure extends AbstractQualityMeasure { + + public PartialEntropyPrecisionRecallMeasure(Object relevantTraces, Object retrievedTraces, boolean bPrecision, boolean bRecall, boolean bSilent) { + super(relevantTraces, retrievedTraces, bPrecision, bRecall, bSilent); + } + + @Override + protected void initializeLimitations() { + this.limitations.add(QualityMeasureLimitation.RELEVANT_BOUNDED); + this.limitations.add(QualityMeasureLimitation.RETRIEVED_BOUNDED); + } + + @Override + protected Pair computeMeasureValue() { + + if ((relevantTraces instanceof Automaton) && (retrievedTraces instanceof Automaton)) { + Pair values = MetricsCalculator.calculate((Automaton)relevantTraces, "REL", (Automaton)retrievedTraces, "RET", true, false, 0, 0, bPrecision, bRecall, bSilent); + return values; + } + return new Pair(0.0, 0.0); + } +} diff --git a/src/entropic/PetriNetChecker.java b/src/entropic/PetriNetChecker.java new file mode 100644 index 0000000..f18550c --- /dev/null +++ b/src/entropic/PetriNetChecker.java @@ -0,0 +1,276 @@ +package entropic; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.jbpt.petri.Flow; +import org.jbpt.petri.INetSystem; +import org.jbpt.petri.Marking; +import org.jbpt.petri.Node; +import org.jbpt.petri.Place; +import org.jbpt.petri.Transition; + +/** + * Checking boundedness of Petri net + * + * @author Anna Kalenkova + */ +public class PetriNetChecker implements IModelChecker { + + public static class KarpMillerTree { + private Node root; + + public KarpMillerTree(Marking rootData) { + root = new Node(rootData); + } + + public static class Node { + private Marking data; + private Node parent; + + public Node(Marking data) { + + this.data = data; + + } + public Node getParent() { + + return parent; + } + public Marking getData() { + + return data; + } + public void 
setParent(Node parent) { + + this.parent=parent; + } + + } + public Node getRoot() { + + return root; + } + } + + private INetSystem net; + + public PetriNetChecker (INetSystem net) { + + this.net = net; + } + + public boolean isBounded() { + + if (net == null) + return false; + + return isBoundedKarpMiller(); + } + + private boolean isBoundedKarpMiller() { + + + Marking initialMarking = new Marking(net); + initialMarking.fromMultiSet(net.getMarking().toMultiSet()); + + Marking startMarking = new Marking(net); + startMarking.fromMultiSet(net.getMarking().toMultiSet()); + + KarpMillerTree tree = new KarpMillerTree(startMarking); + + Set newNodes = new HashSet(); + newNodes.add(tree.getRoot()); + + while(!newNodes.isEmpty()) { + KarpMillerTree.Node curNode = newNodes.iterator().next(); + + Set enabledTransitions = findEnabledTransitions(curNode.getData()); + Marking currentMarking = curNode.getData(); + // Add new markings to the tree + for(Transition enabledTransition: enabledTransitions) { + + net.loadMarking(currentMarking); + net.fire(enabledTransition); + Marking newMarking = new Marking(net); + newMarking.fromMultiSet(net.getMarking().toMultiSet()); + KarpMillerTree.Node newNode = new KarpMillerTree.Node(newMarking); + + Set parents = findParents(curNode); + // If this node is identical to one of the parents, skip it + if(belongsTo(newNode, parents)) { + continue; + } + + // If marking dominates one of the parents' markings, the net is not bounded, return false + if(dominates(newNode, parents)) { + return false; + } + + // Add new node + newNode.setParent(curNode); + newNodes.add(newNode); + } + newNodes.remove(curNode); + } + // Restore marking + net.loadMarking(initialMarking); + return true; + } + + private boolean dominates(KarpMillerTree.Node node, Set otherNodes) { + + Marking nodeMarking = node.getData(); + + for(KarpMillerTree.Node otherNode : otherNodes) { + Marking otherNodeMarking = otherNode.getData(); + 
if(nodeMarking.toMultiSet().containsAll(otherNodeMarking.toMultiSet())) { + return true; + } + } + return false; + } + + private Set findEnabledTransitions (Marking marking) { + + Set enabledTransitions = new HashSet(); + + for (Transition t : net.getTransitions()) { + Collection incomingEdges = net.getIncomingEdges(t); + boolean enabled = true; + for (Flow incomingEdge : incomingEdges) { + Place place = (Place)incomingEdge.getSource(); + if(!marking.toMultiSet().contains(place)) { + enabled = false; + } + } + if(enabled) { + enabledTransitions.add(t); + } + } + + return enabledTransitions; + } + + private boolean belongsTo(KarpMillerTree.Node node, Set setOfNodes) { + + for (KarpMillerTree.Node nodeFromTheSet : setOfNodes) { + if (nodeFromTheSet.getData().toMultiSet().equals(node.getData().toMultiSet())) { + return true; + } + } + + return false; + } + + private Set findParents(KarpMillerTree.Node node) { + Set parents = new HashSet(); + KarpMillerTree.Node curNode = node; + + while (curNode.getParent()!= null) { + parents.add(curNode.getParent()); + curNode = curNode.getParent(); + } + return parents; + } + + + @Override + public boolean isLive(INetSystem sys, Transition t) { + // TODO Auto-generated method stub + return false; + } + + + @Override + public void setLoLAActive(boolean active) { + // TODO Auto-generated method stub + + } + + @Override + public AtomicBoolean isLoLAActive() { + // TODO Auto-generated method stub + return null; + } + + + @Override + public boolean isLive(INetSystem sys) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isReachable(INetSystem sys, + Collection marking) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isBounded(INetSystem sys, Place p) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isBounded(INetSystem sys) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean 
isSoundWorkflowNet(INetSystem sys) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean canReachMarkingWithAtLeastOneTokenAtEachPlace( + INetSystem sys, Set places) { + // TODO Auto-generated method stub + return false; + } + + @Override + public StateSpaceStatistics getStateSpaceStatistics( + INetSystem sys) { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean check(INetSystem sys, String property) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean canReachMarkingWithAtLeastOneTokenAtEachPlace( + INetSystem sys, Set places, Set p) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isReachable(INetSystem sys, + Collection marking, Set p) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isIndexable(INetSystem sys) { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isIndexable(INetSystem sys, Set p) { + // TODO Auto-generated method stub + return false; + } +} diff --git a/src/entropic/QualityMeasureLimitation.java b/src/entropic/QualityMeasureLimitation.java new file mode 100644 index 0000000..9b3bd5b --- /dev/null +++ b/src/entropic/QualityMeasureLimitation.java @@ -0,0 +1,37 @@ +package entropic; + +/** + * An enumeration of index statuses: + * + * RELEVANT_BOUNDED - the model that described relevant traces must be bounded + * RETRIEVED_BOUNDED - the model that described retrieved traces must be bounded + * + * @author Artem Polyvyanyy, Anna Kalenkova + */ +public enum QualityMeasureLimitation { + + RETRIEVED_BOUNDED(1, "The boundedness of the retrieved model"), + RELEVANT_BOUNDED(2, "The boundedness of the relevant model"); + + + private final int limitation; + private final String description; + + QualityMeasureLimitation(int limitation, String description) { + this.limitation = limitation; + this.description = description; + } + + /** 
+ * Get code of this quality measure limitation. + * + * @return Code of this quality measure limitation. + */ + public int getQualityMeasureLimitationCode() { + return this.limitation; + } + + public String getDescription() { + return this.description; + } +} diff --git a/src/entropic/Relevance.java b/src/entropic/Relevance.java new file mode 100644 index 0000000..75ce74f --- /dev/null +++ b/src/entropic/Relevance.java @@ -0,0 +1,59 @@ +package entropic; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.lang3.tuple.Pair; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import com.google.common.collect.Table; + +public class Relevance { + public static void scanAndProcess(XLog log, SAutomaton automaton, ReplayInformationGatherer infoGatherer) { + Table> transitions = automaton.getTransitions(); + Integer initialState = automaton.getInitialState(); + for (XTrace trace : log) { + Integer curr = initialState; + boolean nonfitting = false; + infoGatherer.openTrace(trace); + for (XEvent event : trace) { + if (event.getAttributes().get("concept:name") == null || event.getAttributes().containsKey("lifecycle:transition") && !((XAttribute)event.getAttributes().get("lifecycle:transition")).toString().toUpperCase().equals("COMPLETE")) continue; + String label = ((XAttribute)event.getAttributes().get("concept:name")).toString(); + double prob = 0.0; + if (!nonfitting && transitions.contains(curr, label)) { + Pair pair = transitions.get(curr, label); + curr = pair.getLeft(); + prob = pair.getRight(); + } else { + nonfitting = true; + } + infoGatherer.processEvent(label, prob); + } + if (!nonfitting && automaton.isFinalState(curr)) { + infoGatherer.closeTrace(trace, true, Optional.of(automaton.getFinalStateProb(curr))); + continue; + } + infoGatherer.closeTrace(trace, false, Optional.empty()); + } + } + + private 
static Map run(XLog log, SAutomaton automaton, boolean full, SimpleBackgroundModel analyzer) { + Relevance.scanAndProcess(log, automaton, analyzer); + HashMap result = new HashMap(analyzer.computeRelevance(full)); + if (full) { + // empty if block + } + return result; + } + + public static Map compute(XLog log, SAutomaton automaton, boolean full) { + return Relevance.run(log, automaton, full, new SimpleBackgroundModel()); + } + + public static Map computeNew(XLog log, SAutomaton automaton, boolean full, boolean nonFittingSubLog) { + return Relevance.run(log, automaton, full, new EventFrequencyBasedBackgroundModel(nonFittingSubLog)); + } +} \ No newline at end of file diff --git a/src/entropic/ReplayInformationGatherer.java b/src/entropic/ReplayInformationGatherer.java new file mode 100644 index 0000000..aa802bf --- /dev/null +++ b/src/entropic/ReplayInformationGatherer.java @@ -0,0 +1,12 @@ +package entropic; + +import java.util.Optional; +import org.deckfour.xes.model.XTrace; + +public interface ReplayInformationGatherer { + public void openTrace(XTrace var1); + + public void closeTrace(XTrace var1, boolean var2, Optional var3); + + public void processEvent(String var1, double var2); +} diff --git a/src/entropic/SATransition.java b/src/entropic/SATransition.java new file mode 100644 index 0000000..4767ba8 --- /dev/null +++ b/src/entropic/SATransition.java @@ -0,0 +1,36 @@ +package entropic; + +public class SATransition { + private Integer from; + private Integer to; + private String label; + private Double prob; + + public Integer getFrom() { + return this.from; + } + + public Integer getTo() { + return this.to; + } + + public String getLabel() { + return this.label; + } + + public Double getProb() { + return this.prob; + } + + public SATransition(Integer from, Integer to, String label, Double probability) { + this.from = from; + this.to = to; + this.label = label; + this.prob = probability; + } + + public String toString() { + return String.format("(%d) - %s 
[%10.8f] -> (%d)", this.from, this.label, this.prob, this.to); + } +} + diff --git a/src/entropic/SAutomaton.java b/src/entropic/SAutomaton.java new file mode 100644 index 0000000..6516455 --- /dev/null +++ b/src/entropic/SAutomaton.java @@ -0,0 +1,100 @@ +package entropic; + +import com.google.common.collect.HashBasedTable; +import com.google.common.collect.Table; +import com.google.gson.Gson; +import com.google.gson.stream.JsonReader; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Writer; +import java.lang.reflect.Type; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.tuple.Pair; +import entropic.SATransition; + +public class SAutomaton { + private final Integer initialState; + private final List transitions; + private transient Set states; + private transient Table> transTable; + private transient Map finalStates; + + public SAutomaton(List saTransitions, Integer initialState) { + this.transitions = saTransitions; + this.initialState = initialState; + this.complete(); + } + + private SAutomaton complete() { + return this.complete(1.0E-6); + } + + private SAutomaton complete(double epsilon) { + HashBasedTable> table = HashBasedTable.create(); + HashSet stateSet = new HashSet(); + HashMap outgoingProb = new HashMap(); + HashMap sinkAbsorvingProb = new HashMap(); + for (SATransition stTransition : this.transitions) { + table.put(stTransition.getFrom(), stTransition.getLabel(), Pair.of(stTransition.getTo(), Math.log(stTransition.getProb()))); + stateSet.add(stTransition.getFrom()); + stateSet.add(stTransition.getTo()); + outgoingProb.put(stTransition.getFrom(), outgoingProb.getOrDefault(stTransition.getFrom(), 0.0) + stTransition.getProb()); + } + for (Integer state : stateSet) { + if (outgoingProb.containsKey(state) && !(1.0 - 
(Double)outgoingProb.get(state) > epsilon)) continue; + sinkAbsorvingProb.put(state, Math.log(1.0 - outgoingProb.getOrDefault(state, 0.0))); + } + this.finalStates = sinkAbsorvingProb; + this.transTable = table; + this.states = stateSet; + return this; + } + + public static SAutomaton of(List saTransitions, Integer initialState) { + return new SAutomaton(saTransitions, initialState); + } + + public Integer getInitialState() { + return this.initialState; + } + + public Table> getTransitions() { + return this.transTable; + } + + public Set getStates() { + return this.states; + } + + public static SAutomaton readJSON(String fileName) throws Exception { + JsonReader reader = new JsonReader(new InputStreamReader((InputStream)new FileInputStream(fileName), "UTF-8")); + Gson gson = new Gson(); + SAutomaton automaton = (SAutomaton)gson.fromJson(reader, (Type)((Object)SAutomaton.class)); + automaton.complete(); + return automaton; + } + + public void toJSON(String filename) throws Exception { + FileWriter writer = new FileWriter(filename); + Gson gson = new Gson(); + IOUtils.write(gson.toJson(this), (Writer)writer); + writer.flush(); + writer.close(); + } + + public boolean isFinalState(Integer state) { + return this.finalStates.containsKey(state); + } + + public double getFinalStateProb(Integer state) { + return this.finalStates.get(state); + } +} + diff --git a/src/entropic/SimpleBackgroundModel.java b/src/entropic/SimpleBackgroundModel.java new file mode 100644 index 0000000..d533401 --- /dev/null +++ b/src/entropic/SimpleBackgroundModel.java @@ -0,0 +1,101 @@ +package entropic; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import org.deckfour.xes.model.XTrace; + +import entropic.ReplayInformationGatherer; + +public class SimpleBackgroundModel +implements ReplayInformationGatherer { + int numberOfEvents = 0; + int totalNumberOfTraces = 0; + int totalNumberOfNonFittingTraces = 0; + Set 
labels = new HashSet(); + Map traceFrequency = new HashMap(); + Map traceSize = new HashMap(); + Map log2OfModelProbability = new HashMap(); + double lprob = 0.0; + String largeString = ""; + + public static double log2(double x) { + return Math.log(x) / Math.log(2.0); + } + + public static double h0(int accumulated_rho, double totalNumberOfTraces) { + if (accumulated_rho == 0 || (double)accumulated_rho == totalNumberOfTraces) { + return 0.0; + } + double p = (double)accumulated_rho / totalNumberOfTraces; + return -p * SimpleBackgroundModel.log2(p) - (1.0 - p) * SimpleBackgroundModel.log2(1.0 - p); + } + + @Override + public void openTrace(XTrace trace) { + this.lprob = 0.0; + this.largeString = ""; + } + + @Override + public void closeTrace(XTrace trace, boolean fitting, Optional finalStateProb) { + this.traceSize.put(this.largeString, trace.size()); + ++this.totalNumberOfTraces; + if (fitting) { + this.log2OfModelProbability.put(this.largeString, (this.lprob + finalStateProb.get()) / Math.log(2.0)); + } else { + ++this.totalNumberOfNonFittingTraces; + } + this.traceFrequency.put(this.largeString, this.traceFrequency.getOrDefault(this.largeString, 0) + 1); + } + + @Override + public void processEvent(String eventLabel, double probability) { + this.largeString = this.largeString + eventLabel; + ++this.numberOfEvents; + this.labels.add(eventLabel); + this.lprob += probability; + } + + protected double costBitsUnfittingTraces(String traceId) { + return (double)(1 + this.traceSize.get(traceId)) * SimpleBackgroundModel.log2(1 + this.labels.size()); + } + + protected double costFrequencyDistribution() { + return 0.0; + } + + public Map computeRelevance(boolean full) { + int accumulated_rho = 0; + double accumulated_cost_bits = 0.0; + double accumulated_temp_cost_bits = 0.0; + double accumulated_prob_fitting_traces = 0.0; + double costFreqDistribuPerTrace = 0.0; + for (String traceString : this.traceFrequency.keySet()) { + double traceFreq = 
this.traceFrequency.get(traceString).intValue(); + double cost_bits = 0.0; + double nftrace_cost_bits = 0.0; + if (this.log2OfModelProbability.containsKey(traceString)) { + cost_bits = -this.log2OfModelProbability.get(traceString).doubleValue(); + accumulated_rho = (int)((double)accumulated_rho + traceFreq); + } else { + nftrace_cost_bits = cost_bits = this.costBitsUnfittingTraces(traceString); + } + accumulated_temp_cost_bits += nftrace_cost_bits * traceFreq; + accumulated_cost_bits += cost_bits * traceFreq / (double)this.totalNumberOfTraces; + if (!this.log2OfModelProbability.containsKey(traceString)) continue; + accumulated_prob_fitting_traces += traceFreq / (double)this.totalNumberOfTraces; + } + costFreqDistribuPerTrace = this.costFrequencyDistribution() / (double)this.totalNumberOfTraces; + HashMap result = new HashMap(); + if (full) { + result.put("coverage", accumulated_prob_fitting_traces); + result.put("costOfBackgroundModel", accumulated_temp_cost_bits / (double)this.totalNumberOfTraces); + result.put("costOfFrequencyDistribution", costFreqDistribuPerTrace); + } + result.put("relevance", SimpleBackgroundModel.h0(accumulated_rho, this.totalNumberOfTraces) + accumulated_cost_bits + costFreqDistribuPerTrace); + return result; + } +} \ No newline at end of file diff --git a/src/entropic/StateSpaceStatistics.java b/src/entropic/StateSpaceStatistics.java new file mode 100644 index 0000000..fc020e6 --- /dev/null +++ b/src/entropic/StateSpaceStatistics.java @@ -0,0 +1,34 @@ +package entropic; + +/** + * A data structure to store number of reachable states and state transitions. + * + * @author Artem Polyvyanyy + */ +public class StateSpaceStatistics { + private long nOfStates= 0L; + private long nOfTransitions= 0L; + + protected StateSpaceStatistics(long states, long transitions) { + this.nOfStates = states; + this.nOfTransitions = transitions; + } + + /** + * Get number of states. + * + * @return Number of states. 
/**
 * Immutable pair of counters: the number of reachable states and the number
 * of state transitions of a state space.
 */
public class StateSpaceStatistics {

	private final long nOfStates;
	private final long nOfTransitions;

	/**
	 * @param states      number of reachable states
	 * @param transitions number of state transitions
	 */
	protected StateSpaceStatistics(long states, long transitions) {
		this.nOfStates = states;
		this.nOfTransitions = transitions;
	}

	/** @return the number of states */
	public long getNumberOfStates() {
		return this.nOfStates;
	}

	/** @return the number of state transitions */
	public long getNumberOfTransitions() {
		return this.nOfTransitions;
	}
}
dk.brics.automaton.*; +import gnu.trove.map.TObjectShortMap; + +/** + * Utils for object transformations + * + * @author akalenkova + * + */ +public class Utils { + + private final static String TAU = "tau"; + + /** + * Construct reachability graph of a given net system + * Note that we assume that we have checked that the net system is bounded + * + * @param ns + * @return + */ + public static Automaton constructAutomatonFromNetSystem(NetSystem ns, TObjectShortMap activity2short) { + Map, State> markingToState = new HashMap, State>(); + Set> unprocessedMarkings = new HashSet>(); + + Automaton a = new Automaton(); + boolean containsTauLabels = false; + + // Construct initial state + Collection initialMarking = ns.getMarking().toMultiSet(); + + if ((initialMarking == null) || (initialMarking.size() == 0)) { + initialMarking = deriveInitialMarking(ns); + } + + // Derive final marking + Collection finalMarking = deriveFinalMarking(ns); + +// Collection finalMarking = initialMarking; + + State initialState = new State(); + markingToState.put(initialMarking, initialState); + a.setInitialState(initialState); + if (initialMarking.containsAll(finalMarking) && finalMarking.containsAll(initialMarking)) { + initialState.setAccept(true); + } + unprocessedMarkings.add(initialMarking); + + // Pair of states connected by tau + Set tauPairs = new HashSet(); + + // Construct other states + while (!unprocessedMarkings.isEmpty()) { + Collection curMarking = unprocessedMarkings.iterator().next(); + Set enabledTransitions = retrieveEnabledTransitions(curMarking, ns); + +// System.out.println(enabledTransitions); + for (Transition enabeledTransition : enabledTransitions) { + + Marking marking = new Marking(ns); + marking.fromMultiSet(curMarking); + ns.loadMarking(marking); + + ns.fire(enabeledTransition); + + Collection newMarking = ns.getMarking().toMultiSet(); + State curState = markingToState.get(curMarking); + State newState = markingToState.get(newMarking); + + if (newState == 
null) { + newState = new State(); + markingToState.put(newMarking, newState); + unprocessedMarkings.add(newMarking); + + if (newMarking.containsAll(finalMarking) && finalMarking.containsAll(newMarking)) { + newState.setAccept(true); + } + } + + char c = (char) Integer.valueOf(enabeledTransition.getLabel().hashCode()).shortValue(); +// System.out.println(c); +// System.out.println(newMarking); +// System.out.println(newState); + // If string is empty (silent) + if (c == '\u0000') { + tauPairs.add(new StatePair(curState, newState)); + } else { + if(enabeledTransition.getLabel().contains(TAU)) { + containsTauLabels = true; + } + activity2short.putIfAbsent(enabeledTransition.getLabel(), (short) activity2short.size()); + dk.brics.automaton.Transition t = new dk.brics.automaton.Transition((char)activity2short.get(enabeledTransition.getLabel()), newState); + curState.addTransition(t); +// System.out.println(a); + } + } + unprocessedMarkings.remove(curMarking); + } + + if(containsTauLabels) { + System.out.println("Note that some transitions contain labels with " + TAU + ". These transitions will not be considered as silent. 
" + + "To make a transition silent please set an empty label."); + } + + a.addEpsilons(tauPairs); + a.determinize(); +// System.out.println(a); + a.minimize(); + return a; + } + + /** + * Construct prefix tree for a given event log + * + * @param ns + * @return + */ + public static Automaton constructAutomatonFromLog(XLog log, TObjectShortMap activity2short) { + + Automaton a = new Automaton(); + State initialState = new State(); + a.setInitialState(initialState); + + Iterator logIterator = log.iterator(); + + while (logIterator.hasNext()) { + State curState = initialState; + XTrace trace = logIterator.next(); + Iterator traceIterator = trace.iterator(); + while (traceIterator.hasNext()) { + XEvent event = traceIterator.next(); + XAttribute attribute = event.getAttributes().get("concept:name"); + if (attribute != null) { + String label = attribute.toString(); + activity2short.putIfAbsent(label, (short) activity2short.size()); + char c = (char)activity2short.get(label); + boolean alreadyConstructed = false; + Set transitions = curState.getTransitions(); + for (dk.brics.automaton.Transition transition : transitions) { + if ((transition.getMax() == c) && (transition.getMin() == c)) { + curState = transition.getDest(); + alreadyConstructed = true; + } + } + if (!alreadyConstructed) { + State newState = new State(); + dk.brics.automaton.Transition t = new dk.brics.automaton.Transition(c, newState); + curState.addTransition(t); + curState = newState; + } + } + } + curState.setAccept(true); + } + a.minimize(); + return a; + } + + /** + * Constructing system net from a Petri net + * + * @param pn + * @return + */ + public static NetSystem constructNetSystemFromPetrinet(Petrinet pn) { + + Map placesMap + = new HashMap(); + Map transitionsMap + = new HashMap(); + + NetSystem ns = new NetSystem(); + for (org.processmining.models.graphbased.directed.petrinet.elements.Place place : pn.getPlaces()) { + Place newPlace = new Place(); + placesMap.put(place, newPlace); + 
ns.addPlace(newPlace); + } + for (org.processmining.models.graphbased.directed.petrinet.elements.Transition transition : pn.getTransitions()) { + Transition newTransition = new Transition("", transition.getLabel()); + if(transition.isInvisible()) { + newTransition.setLabel(""); + } + transitionsMap.put(transition, newTransition); + ns.addTransition(newTransition); + } + for(PetrinetEdge edge : pn.getEdges()) { + Object source = edge.getSource(); + Object target = edge.getTarget(); + if ((source instanceof org.processmining.models.graphbased.directed.petrinet.elements.Place) + && (target instanceof org.processmining.models.graphbased.directed.petrinet.elements.Transition)) { + ns.addFlow(placesMap.get((org.processmining.models.graphbased.directed.petrinet.elements.Place)source), + transitionsMap.get((org.processmining.models.graphbased.directed.petrinet.elements.Transition)target)); + } + if ((target instanceof org.processmining.models.graphbased.directed.petrinet.elements.Place) + && (source instanceof org.processmining.models.graphbased.directed.petrinet.elements.Transition)) { + ns.addFlow(transitionsMap.get((org.processmining.models.graphbased.directed.petrinet.elements.Transition)source), + placesMap.get((org.processmining.models.graphbased.directed.petrinet.elements.Place)target)); + } + } + return ns; + } + + /** + * Construct enabled transitions for the current marking + * + * @param marking + * @return + */ + private static Set retrieveEnabledTransitions(Collection marking, NetSystem net) { + + Set enabledTransitions = new HashSet(); + + IPetriNet petriNet = net; + for (Transition transition : petriNet.getTransitions()) { + boolean isEnabled = true; + for (Flow inFlow : petriNet.getIncomingEdges(transition)) { + Place inPlace = (Place)inFlow.getSource(); + if (!marking.contains(inPlace)) { + isEnabled = false; + break; + } + } + if(isEnabled) { + enabledTransitions.add(transition); + } + } + return enabledTransitions; + } + +// /** +// * Derive initial 
marking from the structure of a given net system +// * (should work for workflow nets, for other types of nets results are to be checked) +// * +// * @param ns +// * @return +// */ +// private static Marking deriveInitialMarking(NetSystem ns) { +// +// Marking initialMarking = new Marking(); +// if (ns != null) { +// for (Place place : ns.getPlaces()) { +// Collection predsessors = ns.getDirectPredecessors(place); +// if ((predsessors == null) || (predsessors.size() == 0)) { +// initialMarking.put(place, 1); +// } else { +// initialMarking.put(place, 0); +// } +// } +// } +// return initialMarking; +// } + + + /** + * Derive final marking from the structure of a given net system + * (should work for workflow nets, for other types of nets results are to be checked) + * + * @param ns + * @return + */ + private static Collection deriveFinalMarking(NetSystem ns) { + + Collection finalMarking = new HashSet(); + if (ns != null) { + for (Place place : ns.getPlaces()) { + Collection successors = ns.getDirectSuccessors(place); + if ((successors == null) || (successors.size() == 0)) { + finalMarking.add(place); + } + } + } + return finalMarking; + } + + /** + * Derive final marking from the structure of a given net system + * (should work for workflow nets, for other types of nets results are to be checked) + * + * @param ns + * @return + */ + private static Collection deriveInitialMarking(NetSystem ns) { + + Collection initialMarking = new HashSet(); + if (ns != null) { + for (Place place : ns.getPlaces()) { + Collection predecessors = ns.getDirectPredecessors(place); + if ((predecessors == null) || (predecessors.size() == 0)) { + initialMarking.add(place); + } + } + } + return initialMarking; + } + + private static PrintStream originalStream = System.out; + + private static PrintStream dummyStream = new PrintStream(new OutputStream(){ + public void write(int b) { + // Nothing + } + }); + + public static void hidePrinting() { + System.setOut(dummyStream); + } + + public 
static void restorePrinting() { + System.setOut(originalStream); + } + + + public static String numberOfTransitions(Automaton a) { + int c = 0; + for (State s : a.getStates()) { + for (dk.brics.automaton.Transition t : s.getTransitions()) { + char max = t.getMax(); + char min = t.getMin(); + int numberOfChars = max - min + 1; + c += numberOfChars; + } + } + return Integer.toString(c); + } +} diff --git a/src/entropic/XLogReader.java b/src/entropic/XLogReader.java new file mode 100644 index 0000000..3d706ba --- /dev/null +++ b/src/entropic/XLogReader.java @@ -0,0 +1,54 @@ +package entropic; + +import java.io.File; +import org.deckfour.xes.in.XMxmlGZIPParser; +import org.deckfour.xes.in.XMxmlParser; +import org.deckfour.xes.in.XParser; +import org.deckfour.xes.in.XesXmlGZIPParser; +import org.deckfour.xes.in.XesXmlParser; +import org.deckfour.xes.model.XLog; + +public class XLogReader { + public static XLog openLog(String inputLogFileName) throws Exception { + XParser parser; + XLog log = null; + if (inputLogFileName.toLowerCase().contains("mxml.gz")) { + parser = new XMxmlGZIPParser(); + if (((XMxmlGZIPParser)parser).canParse(new File(inputLogFileName))) { + try { + log = parser.parse(new File(inputLogFileName)).get(0); + } catch (Exception e) { + e.printStackTrace(); + } + } + } else if ((inputLogFileName.toLowerCase().contains("mxml") || inputLogFileName.toLowerCase().contains("xml")) && ((XMxmlParser)(parser = new XMxmlParser())).canParse(new File(inputLogFileName))) { + try { + log = parser.parse(new File(inputLogFileName)).get(0); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (inputLogFileName.toLowerCase().contains("xes.gz")) { + parser = new XesXmlGZIPParser(); + if (((XesXmlGZIPParser)parser).canParse(new File(inputLogFileName))) { + try { + log = parser.parse(new File(inputLogFileName)).get(0); + } catch (Exception e) { + e.printStackTrace(); + } + } + } else if (inputLogFileName.toLowerCase().contains("xes") && ((XesXmlParser)(parser = new 
XesXmlParser())).canParse(new File(inputLogFileName))) { + try { + log = parser.parse(new File(inputLogFileName)).get(0); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (log == null) { + throw new Exception("Oops ..."); + } + return log; + } +} + + \ No newline at end of file diff --git a/src/entropic/Xes2Numbers.java b/src/entropic/Xes2Numbers.java new file mode 100644 index 0000000..6fe4824 --- /dev/null +++ b/src/entropic/Xes2Numbers.java @@ -0,0 +1,27 @@ +package entropic; + +import java.util.HashMap; +import java.util.stream.Collectors; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.jbpt.pm.relevance.utils.XLogReader; + +public class Xes2Numbers { + public static void main(String[] args) throws Exception { + XLog log = XLogReader.openLog(args[0]); + HashMap label2index = new HashMap(); + for (XTrace trace : log) { + for (XEvent event : trace) { + String label = ((XAttribute)event.getAttributes().get("concept:name")).toString(); + if (label2index.containsKey(label)) continue; + label2index.put(label, label2index.size()); + } + } + for (XTrace trace : log) { + System.out.println(trace.stream().map(e -> ((Integer)label2index.get(((XAttribute)e.getAttributes().get("concept:name")).toString())).toString()).collect(Collectors.joining(","))); + } + System.out.println("done"); + } +} \ No newline at end of file diff --git a/src/org/.DS_Store b/src/org/.DS_Store new file mode 100644 index 0000000..baa62c0 Binary files /dev/null and b/src/org/.DS_Store differ diff --git a/src/org/processmining/.DS_Store b/src/org/processmining/.DS_Store new file mode 100644 index 0000000..5008ddf Binary files /dev/null and b/src/org/processmining/.DS_Store differ diff --git a/src/org/processmining/plugins/bpmnminer/types/MinerSettings.java b/src/org/processmining/plugins/bpmnminer/types/MinerSettings.java new file mode 100644 index 0000000..d7d9d53 
--- /dev/null +++ b/src/org/processmining/plugins/bpmnminer/types/MinerSettings.java @@ -0,0 +1,55 @@ +package org.processmining.plugins.bpmnminer.types; + +import org.deckfour.xes.classification.XEventClassifier; +import org.deckfour.xes.extension.std.XOrganizationalExtension; +import org.deckfour.xes.info.impl.XLogInfoImpl; + +public class MinerSettings { + public final static int DANGLING_PATTERN_ADD_XOR = 1; + public final static int DANGLING_PATTERN_ADD_AND = 2; + public final static int DANGLING_PATTERN_IGNORE = 3; + + public MinerSettings() { + + } + + public MinerSettings(int logSize) { + double nom = logSize / ((double)logSize + (double)dependencyDivisor); + if (nom <= 0D) nom = 0D; + if (nom >= 0.9D) nom = 0.9D; + dependencyThreshold = nom; + l1lThreshold = nom; + l2lThreshold = nom; + } + + public XEventClassifier classifier = XLogInfoImpl.NAME_CLASSIFIER; + + public double dependencyThreshold = 0.90; // [0, 1] + public double l1lThreshold = 0.90; // [0, 1] + public double l2lThreshold = 0.90; // [0, 1] + public double longDistanceThreshold = 0.90; // [0, 1] + public int dependencyDivisor = 1; // [0, n] + public double causalityStrength = 0.80; // [0, 1] + public double duplicateThreshold = 0.10; // [0, 1] + + public double patternThreshold = 0D; // [-1, 1] + + public boolean useAllConnectedHeuristics = true; + public boolean useOnlyNormalDependenciesForConnecting = false; + public boolean useLongDistanceDependency = false; + public boolean useUniqueStartEndTasks = true; + + public boolean collapseL1l = true; + public boolean preferAndToL2l = false; + public boolean preventL2lWithL1l = true; + + public int backwardContextSize = 0; // Set to 1 to mine duplicates + public int forwardContextSize = 0; // Set to 1 to mine duplicates + + public boolean suppressFitnessReport = true; + + public int danglingPatternStrategy = DANGLING_PATTERN_ADD_XOR; + + public String organizationalField = XOrganizationalExtension.KEY_RESOURCE; + +} diff --git 
a/src/org/processmining/slpnminer/connections/AbstractSemanticConnection.java b/src/org/processmining/slpnminer/connections/AbstractSemanticConnection.java new file mode 100644 index 0000000..01fbd7f --- /dev/null +++ b/src/org/processmining/slpnminer/connections/AbstractSemanticConnection.java @@ -0,0 +1,28 @@ +package org.processmining.slpnminer.connections; + +import org.processmining.framework.connections.impl.AbstractStrongReferencingConnection; +import org.processmining.framework.util.Cast; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.Marking; + +public abstract class AbstractSemanticConnection extends AbstractStrongReferencingConnection { + + public final static String NET = "Net"; + public final static String MARKING = "Marking"; + public final static String SEMANTICS = "Semantics"; + + AbstractSemanticConnection(String label, PetrinetGraph net, Marking marking, + Semantics semantics) { + super(label); + putStrong(SEMANTICS, semantics); + put(NET, net); + put(MARKING, marking); + } + + public Semantics getSemantics() { + return Cast.>cast(get(SEMANTICS)); + } + +} diff --git a/src/org/processmining/slpnminer/connections/DeadMarkingConnection.java b/src/org/processmining/slpnminer/connections/DeadMarkingConnection.java new file mode 100644 index 0000000..6068500 --- /dev/null +++ b/src/org/processmining/slpnminer/connections/DeadMarkingConnection.java @@ -0,0 +1,19 @@ +package org.processmining.slpnminer.connections; + +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.Marking; +import 
org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet; + +public class DeadMarkingConnection extends AbstractSemanticConnection { + public final static String DEADMARKINGS = "Dead Markings"; + + public DeadMarkingConnection(PetrinetGraph net, Marking initial, AcceptStateSet acceptingStates, + Semantics semantics) { + super("Connection to Dead markings of " + net.getLabel(), net, initial, semantics); + put(DEADMARKINGS, acceptingStates); + } + + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/connections/ReachabilityConnection.java b/src/org/processmining/slpnminer/connections/ReachabilityConnection.java new file mode 100644 index 0000000..11387a0 --- /dev/null +++ b/src/org/processmining/slpnminer/connections/ReachabilityConnection.java @@ -0,0 +1,62 @@ +package org.processmining.slpnminer.connections; + +import org.processmining.framework.connections.impl.AbstractConnection; +import org.processmining.models.graphbased.directed.DirectedGraphElementWeights; +import org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet; + +public class ReachabilityConnection extends AbstractConnection { + public final static String TS = "TS"; + public final static String WEIGHTS = "Weights"; + public final static String STARTIDS = "Start ids"; + public final static String ACCEPTIDS = "Accept ids"; + public final static String SETTINGS = "Settings"; + private boolean hasWeights; + private boolean hasSettings; + + public ReachabilityConnection(ReachabilityGraph ts, DirectedGraphElementWeights weights, StartStateSet starts, + AcceptStateSet accepts) { + super(ts.getLabel() + " and related mined information"); + init(ts, weights, starts, accepts, null); + } + + public ReachabilityConnection(ReachabilityGraph ts, StartStateSet starts, AcceptStateSet accepts) { + 
super(ts.getLabel() + " and related mined information"); + init(ts, null, starts, accepts, null); + } + + public ReachabilityConnection(ReachabilityGraph ts, DirectedGraphElementWeights weights, StartStateSet starts, + AcceptStateSet accepts, Object settings) { + super(ts.getLabel() + " and related mined information"); + init(ts, weights, starts, accepts, settings); + } + + private void init(ReachabilityGraph ts, DirectedGraphElementWeights weights, StartStateSet starts, + AcceptStateSet accepts, Object settings) { + put(TS, ts); + if (weights != null) { + put(WEIGHTS, weights); + hasWeights = true; + } else { + hasWeights = false; + } + put(STARTIDS, starts); + put(ACCEPTIDS, accepts); + if (settings != null) { + put(SETTINGS, settings); + hasSettings = true; + } else { + hasSettings = false; + } + } + + public boolean hasWeights() { + return hasWeights; + } + + public boolean hasSettings() { + return hasSettings; + } +} + diff --git a/src/org/processmining/slpnminer/connections/StateSpaceConnection.java b/src/org/processmining/slpnminer/connections/StateSpaceConnection.java new file mode 100644 index 0000000..4b689c3 --- /dev/null +++ b/src/org/processmining/slpnminer/connections/StateSpaceConnection.java @@ -0,0 +1,21 @@ +package org.processmining.slpnminer.connections; + + +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; + +public class StateSpaceConnection extends AbstractSemanticConnection { + + public final static String STATEPACE = "Statespace"; + + + public StateSpaceConnection(PetrinetGraph net, Marking marking, ReachabilityGraph statespace, + Semantics semantics) { + super("Connection to statespace of " + net.getLabel(), net, marking, semantics); + 
// Store the statespace under the (historically misspelled) "Statespace" key;
// the constant name STATEPACE is public API and kept for compatibility.
put(STATEPACE, statespace);
	}

}
diff --git a/src/org/processmining/slpnminer/connections/TransitionSystemConnection.java b/src/org/processmining/slpnminer/connections/TransitionSystemConnection.java
new file mode 100644
index 0000000..d85c805
--- /dev/null
+++ b/src/org/processmining/slpnminer/connections/TransitionSystemConnection.java
@@ -0,0 +1,61 @@
package org.processmining.slpnminer.connections;

import org.processmining.framework.connections.impl.AbstractConnection;
import org.processmining.slpnminer.models.TransitionSystem;
import org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet;
import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet;
import org.processmining.models.graphbased.directed.DirectedGraphElementWeights;

/**
 * Connection object linking a mined {@link TransitionSystem} to the artifacts
 * produced alongside it: optional edge weights, the start-state ids, the
 * accept-state ids, and an optional settings object.
 *
 * Weights and settings are optional; query {@code hasWeights()} /
 * {@code hasSettings()} before reading the corresponding keys.
 */
public class TransitionSystemConnection extends AbstractConnection {
	// Keys under which the individual artifacts are stored in the connection.
	public final static String TS = "TS";
	public final static String WEIGHTS = "Weights";
	public final static String STARTIDS = "Start ids";
	public final static String ACCEPTIDS = "Accept ids";
	public final static String SETTINGS = "Settings";
	// Track which of the optional artifacts were actually supplied.
	private boolean hasWeights;
	private boolean hasSettings;

	/** Connection with weights but no settings object. */
	public TransitionSystemConnection(TransitionSystem ts, DirectedGraphElementWeights weights, StartStateSet starts,
			AcceptStateSet accepts) {
		super(ts.getLabel() + " and related mined information");
		init(ts, weights, starts, accepts, null);
	}

	/** Connection with neither weights nor settings. */
	public TransitionSystemConnection(TransitionSystem ts, StartStateSet starts, AcceptStateSet accepts) {
		super(ts.getLabel() + " and related mined information");
		init(ts, null, starts, accepts, null);
	}

	/** Connection with both weights and a settings object. */
	public TransitionSystemConnection(TransitionSystem ts, DirectedGraphElementWeights weights, StartStateSet starts,
			AcceptStateSet accepts, Object settings) {
		super(ts.getLabel() + " and related mined information");
		init(ts, weights, starts, accepts, settings);
	}

	// Shared constructor body: stores the mandatory artifacts and records
	// which optional ones (weights, settings) are present.
	private void init(TransitionSystem ts, DirectedGraphElementWeights weights, StartStateSet
starts, + AcceptStateSet accepts, Object settings) { + put(TS, ts); + if (weights != null) { + put(WEIGHTS, weights); + hasWeights = true; + } else { + hasWeights = false; + } + put(STARTIDS, starts); + put(ACCEPTIDS, accepts); + if (settings != null) { + put(SETTINGS, settings); + hasSettings = true; + } else { + hasSettings = false; + } + } + + public boolean hasWeights() { + return hasWeights; + } + + public boolean hasSettings() { + return hasSettings; + } +} diff --git a/src/org/processmining/slpnminer/helpers/EquationSystems.java b/src/org/processmining/slpnminer/helpers/EquationSystems.java new file mode 100644 index 0000000..d2faa8a --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/EquationSystems.java @@ -0,0 +1,161 @@ +package org.processmining.slpnminer.helpers; + +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Set; + +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.State; +import org.processmining.slpnminer.models.reachabilitygraph.Transition; + +public class EquationSystems { + + public String getVarString(ReachabilityGraph rg){ + Set transSet = rg.getEdges(); + + String varStr="{"; + + for(Transition eachTrans: transSet){ + varStr.concat(eachTrans.getLabel()); + } + + varStr.concat("})"); + return varStr; + } + + public Object[] getEqStr( + ReachabilityGraph rg, + HashMap tm, + HashMap> tProbMap, + HashMap combiToPetri, + HashSet reachableState){ + Set transSet = rg.getEdges(); + String varStr="({"; + HashMap stateToVar= new HashMap<>(); + HashMap transToVar= new HashMap<>(); + + Integer stateIdx = 1; + for(State state: reachableState){ + if(state.isInitiating()) { + stateToVar.put(state, "a0"); + } + else { + String stateName = "a" + stateIdx++; + stateToVar.put(state, stateName); + } + } + + for(Transition trans: transSet){ + String transName = tm.get(trans.getIdentifier()); + 
transToVar.put(trans, transName); + } + + int stateNum = 0; + + HashMap replTransProb = new HashMap(); + Integer idx4Repl = 0; + + for(State currentState: reachableState) + { + String eq=""; + + // if the the current state is accepting state + if(currentState.isAccepting()){ + eq = stateToVar.get(currentState)+"=="+"1"; + } + + // if the current state if just transient state + else{ + String subVarStr = stateToVar.get(currentState)+"=="; + int outEdgeNum = rg.getOutEdges(currentState).size(); + if(outEdgeNum > 1){ + int i = 0; + for (Transition trans : rg.getOutEdges(currentState)) { + + if(reachableState.contains(trans.getTarget())) { + Object stateInPetri = combiToPetri.get(currentState.getLabel()); + String transProb = tProbMap.get(transToVar.get(trans)).get(stateInPetri.toString()); + replTransProb.put("x"+idx4Repl.toString(), transProb); +// subVarStr = subVarStr.concat(stateToVar.get(trans.getTarget()) + "*" + transProb); + + subVarStr = subVarStr.concat(stateToVar.get(trans.getTarget()) + "*" + "x"+idx4Repl.toString()); + idx4Repl++; + + if(i < outEdgeNum-1) { + subVarStr = subVarStr.concat("+"); + } + } + i++; + } + if(subVarStr.substring(subVarStr.length() - 1).equals("+")) { + subVarStr = subVarStr.substring(0, subVarStr.length() - 1); + } + eq = eq.concat(subVarStr); + } + else if(outEdgeNum == 1){ + for (Transition trans : rg.getOutEdges(currentState)) { + + Object stateInPetri = combiToPetri.get(currentState.getLabel()); +// System.out.println("get the current state:"+stateInPetri); + + String transProb = tProbMap.get(transToVar.get(trans)).get(stateInPetri.toString()); + + replTransProb.put("x"+idx4Repl.toString(), transProb); + + if(transProb.equals("1")) { + subVarStr = subVarStr.concat(stateToVar.get(trans.getTarget())); + } + else { + subVarStr = subVarStr.concat(stateToVar.get(trans.getTarget()) + "*" + "x"+idx4Repl.toString()); + } + idx4Repl++; + + + eq = eq.concat(subVarStr); + } + } + } + varStr = varStr.concat(eq); + if(stateNum < 
reachableState.size()-1){ + varStr = varStr.concat(","); + } + stateNum++; + + } + varStr = varStr.concat("},{"); + String originalString = getStateVarLst(stateToVar); + // Remove square brackets and spaces + varStr = varStr.concat(originalString); + varStr = varStr.concat("})"); + Object[] obj = new Object[2]; + obj[0] = varStr; + obj[1] = replTransProb; + return obj; + } + + + + private String getStateVarLst(HashMap stateToVar) { + + String result = ""; + // TODO Auto-generated method stub + for(State s:stateToVar.keySet()) { + + result = result.concat(stateToVar.get(s)+","); + + } + + return result.substring(0, result.length()-1); + } + + public Set getSubStates(ReachabilityGraph rg, + State currentState){ + Set subStateSet = new HashSet<>(); + Collection allOutEdges = rg.getOutEdges(currentState); + for(Transition trans: allOutEdges){ + subStateSet.add(trans.getTarget()); + } + return subStateSet; + } +} diff --git a/src/org/processmining/slpnminer/helpers/ExpReOrganise.java b/src/org/processmining/slpnminer/helpers/ExpReOrganise.java new file mode 100644 index 0000000..55d1d63 --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/ExpReOrganise.java @@ -0,0 +1,50 @@ +package org.processmining.slpnminer.helpers; + +import java.util.regex.*; + +public class ExpReOrganise { + + public static void main(String[] args) { + String expression = "(-t2^2*t4)/(-t1^3-3*t3^14*t4)"; + + getTransformedString(expression); + + } + + private static String getTransformedString(String expression) { + // Define the pattern for detecting variables with exponentiation + Pattern pattern = Pattern.compile("([a-zA-Z]\\d*)\\^\\d+"); + + // Create a matcher with the given expression + Matcher matcher = pattern.matcher(expression); + + String expression2 = expression; + // Find and print all matches + while (matcher.find()) { +// System.out.println("Variable with exponentiation: " + transformPowerExpression(matcher.group())); + expression2 = expression2.replace(matcher.group(), 
transformPowerExpression(matcher.group())); + } + return expression2; + } + + private static String transformPowerExpression(String powerExpression) { + // Split the expression into variable and exponent parts + String[] parts = powerExpression.split("\\^"); + + // Extract variable and exponent + String variable = parts[0]; + int exponent = Integer.parseInt(parts[1]); + + // Create the transformed expression + StringBuilder transformedExpression = new StringBuilder(); + for (int i = 0; i < exponent; i++) { + transformedExpression.append(variable); + if (i < exponent - 1) { + transformedExpression.append("*"); + } + } + + return transformedExpression.toString(); + } + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/helpers/IsolateVariable.java b/src/org/processmining/slpnminer/helpers/IsolateVariable.java new file mode 100644 index 0000000..55406ca --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/IsolateVariable.java @@ -0,0 +1,89 @@ +package org.processmining.slpnminer.helpers; + +import org.matheclipse.core.basic.Config; +import org.matheclipse.core.eval.EvalEngine; +import org.matheclipse.core.eval.ExprEvaluator; +import org.matheclipse.core.expression.S; +import org.matheclipse.core.form.output.OutputFormFactory; +import org.matheclipse.core.interfaces.IExpr; +import java.io.StringWriter; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +public class IsolateVariable { + + static ExecutorService executorService; + + public static void main(String[] args) { + String res = ""; + String res2 = ""; + try { + res = getIsolatedVar("Solve({a1==a3,a2==a4,a3==a5*t1/(t0+t2+t1),a4==a1*t0/(t0+t2+t1),a5==1,a0==a2},{a1,a2,a3,a4,a5,a0})"); + res2 = 
getIsolatedVar("Solve({a1==a4*t10/(t10+t9),a2==a1*t7/(t7+t4),a4==1,a5==a11*t4/(t4+t1+t2+t0)+a12*t1/(t4+t1+t2+t0),a6==a10*t3/(t12+t3),a10==a12*t1/(t6+t1+t2+t0)+a5*t6/(t6+t1+t2+t0),a11==a2*t1/(t6+t1+t2+t0),a12==a2*t4/(t7+t4),a0==a6},{a1,a2,a4,a5,a6,a10,a11,a12,a0})"); + } + catch(Exception e) { + + } + finally { + System.out.println("eq system 1: "+res); + System.out.println("eq system 2: "+res2); + + } + // getIsolatedVar("Solve({x1==x12,x2==x4,x3==x11*t28/(t28+t27)+x7*t27/(t28+t27),x4==0,x5==1,x6==0,x7==x5,x8==x18*t28/(t28+t27)+x11*t27/(t28+t27),x9==0,x10==x17,x11==x14,x12==x2,x13==0,x14==0,x15==x1*t37/(t37+t63+t23)+x13*t63/(t37+t63+t23)+x10*t23/(t37+t63+t23),x16==x8,x17==0,x18==x6,x19==x9,x0==x15},{x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x0})"); + } + + public static String getIsoRes(String equationSystems) { + executorService = Executors.newSingleThreadExecutor(); + String rst = null; + Future future = executorService.submit(() -> getIsolatedVar(equationSystems)); + try { + //set timeout + rst = future.get(100, TimeUnit.SECONDS); + } catch (TimeoutException e) { + System.out.println("Timeout!"); + } catch(Exception e){ + + }finally { + executorService.shutdown(); + } + return rst; + } + + public static String getIsolatedVar(String equationSystems){ + ExprEvaluator scriptEngine = new ExprEvaluator(); + String evaledResult = printResult(scriptEngine.eval(equationSystems)); + String isolatedVar = evaledResult.substring(evaledResult.indexOf(">")+1, + evaledResult.indexOf(",")); + return isolatedVar; + } + + private static String printResult(IExpr result, boolean relaxedSyntax) { + if (result.equals(S.Null)) { + return ""; + } + final StringWriter buf = new StringWriter(); + EvalEngine engine = EvalEngine.get(); + + OutputFormFactory off; + int significantFigures = engine.getSignificantFigures(); + + off = OutputFormFactory.get(relaxedSyntax, false, significantFigures - 1, significantFigures + 1); + + if (off.convert(buf, result)) { + // 
print the result in the console + return buf.toString(); + } + if (Config.FUZZ_TESTING) { + throw new NullPointerException(); + } + return "ScriptEngine: ERROR-IN-OUTPUTFORM"; + } + + private static String printResult(IExpr result) { + return printResult(result, true); + } + +} diff --git a/src/org/processmining/slpnminer/helpers/ObjectOutputStream.java b/src/org/processmining/slpnminer/helpers/ObjectOutputStream.java new file mode 100644 index 0000000..138a7d7 --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/ObjectOutputStream.java @@ -0,0 +1,71 @@ +package org.processmining.slpnminer.helpers; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.HashMap; + +import org.deckfour.xes.model.XLog; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.statisticaltests.test.FakeContext; +import org.processmining.xeslite.plugin.OpenLogFileLiteImplPlugin; + +import dk.brics.automaton.Automaton; +import entropic.Utils; +import gnu.trove.map.TObjectShortMap; +import gnu.trove.map.custom_hash.TObjectShortCustomHashMap; +import gnu.trove.strategy.HashingStrategy; + +public class ObjectOutputStream { + + public static void main(String[] args) { + + HashingStrategy strategy = new HashingStrategy() { + + public int computeHashCode(String object) { + return object.hashCode(); + } + + public boolean equals(String o1, String o2) { + return o1.equals(o2); + } + }; + + TObjectShortMap activity2short = new TObjectShortCustomHashMap(strategy, 10, 0.5f, (short) -1); + + PluginContext context = new FakeContext(); + + XLog log1; + try { + log1 = (XLog) new OpenLogFileLiteImplPlugin().importFile(context, new File("/Applications/Programming/xes/er.xes")); + Automaton at1 = Utils.constructAutomatonFromLog(log1, activity2short); + System.out.println(at1); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + // HashMap hmap = new HashMap(); +// //Adding elements to HashMap +// hmap.put(11, 
"AB"); +// hmap.put(2, "CD"); +// hmap.put(33, "EF"); +// hmap.put(9, "GH"); +// hmap.put(3, "IJ"); +// try +// { +// FileOutputStream fos = +// new FileOutputStream("hashmap.ser"); +// ObjectOutputStream oos = new ObjectOutputStream(fos); +// oos.writeObject(hmap); +// oos.close(); +// fos.close(); +// System.out.printf("Serialized HashMap data is saved in hashmap.ser"); +// }catch(IOException ioe) +// { +// ioe.printStackTrace(); +// } + } + + + +} diff --git a/src/org/processmining/slpnminer/helpers/RandomNumberSplitter.java b/src/org/processmining/slpnminer/helpers/RandomNumberSplitter.java new file mode 100644 index 0000000..25b5282 --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/RandomNumberSplitter.java @@ -0,0 +1,126 @@ +package org.processmining.slpnminer.helpers; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Random; + +import org.deckfour.xes.factory.XFactory; +import org.deckfour.xes.factory.XFactoryRegistry; +import org.deckfour.xes.model.XAttributeMap; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.deckfour.xes.out.XSerializer; +import org.deckfour.xes.out.XesXmlSerializer; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.log.utils.XUtils; +import org.processmining.statisticaltests.test.FakeContext; +import org.processmining.xeslite.plugin.OpenLogFileLiteImplPlugin; + +public class RandomNumberSplitter { + + public static void main(String[] args) { + + PluginContext context = new FakeContext(); + + try { + XLog log = (XLog) new OpenLogFileLiteImplPlugin().importFile(context, new File("/Applications/Programming/dataset/Road_Traffic_Fine_Management_Process.xes")); + + + + List> traceIdxGroups = getTracesIndexFromLog(log); + + for (int i=0;i<5;i++) { + XLog[] testTrainLog = 
extractTrainAndTestByIndex(log, traceIdxGroups.get(i),XFactoryRegistry.instance().currentDefault()); + XLog testLog = testTrainLog[0]; + XLog trainLog = testTrainLog[1]; + XSerializer logSerializer = new XesXmlSerializer(); + + String file = "/Applications/Programming/dataset/RTF/" + "train"+ i+".xes"; + FileOutputStream out = new FileOutputStream(file); + logSerializer.serialize(trainLog, out); + out.close(); + + String file2 = "/Applications/Programming/dataset/RTF/" +"test"+ i+".xes"; + FileOutputStream out2 = new FileOutputStream(file2); + logSerializer.serialize(testLog, out2); + out2.close(); + } + + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + + + // Generate ten index sets for splitting log + public static List> getTracesIndexFromLog(XLog log){ + int logSize = log.size(); + int groupSize = log.size()/10; + + // Generate a list of numbers from 1 to 1000 + List numbers = new ArrayList<>(); + for (int i = 1; i <= logSize; i++) { + numbers.add(i); + } + + // Shuffle the list randomly + Collections.shuffle(numbers, new Random()); + + // Split the shuffled list into 10 groups of 100 + List> traceIdxGroups = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + int fromIndex = i * groupSize; + int toIndex = fromIndex + groupSize; + List group = numbers.subList(fromIndex, toIndex); + traceIdxGroups.add(group); + } + + return traceIdxGroups; + } + + + // Given an index set, extract sub-event logs accordingly + public static XLog [] extractTrainAndTestByIndex( + XLog log, + List traceIndexToKeep, + XFactory factory) { + + XLog testLog = XUtils.createLogFrom(log, factory); + XLog trainLog = XUtils.createLogFrom(log, factory); + + int i = 0; + for (Iterator iterator = log.iterator(); iterator.hasNext();) { + XTrace t = iterator.next(); + if (traceIndexToKeep.contains(i)) { + XTrace newTrace = factory.createTrace((XAttributeMap) t.getAttributes().clone()); + for (XEvent e : t) { + 
newTrace.add(factory.createEvent((XAttributeMap) e.getAttributes().clone())); + } + testLog.add(newTrace); + } + else { + XTrace newTrace = factory.createTrace((XAttributeMap) t.getAttributes().clone()); + for (XEvent e : t) { + newTrace.add(factory.createEvent((XAttributeMap) e.getAttributes().clone())); + } + trainLog.add(newTrace); + } + i++; + } + + XLog[] logObj = new XLog[2]; + logObj[0] = testLog; + logObj[1] = trainLog; + + return logObj; + } + + } + diff --git a/src/org/processmining/slpnminer/helpers/SLPNToAutomaton.java b/src/org/processmining/slpnminer/helpers/SLPNToAutomaton.java new file mode 100644 index 0000000..d3bff68 --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/SLPNToAutomaton.java @@ -0,0 +1,11 @@ +package org.processmining.slpnminer.helpers; + +import org.processmining.models.graphbased.directed.petrinet.Petrinet; + +public class SLPNToAutomaton { + + public void getAutomaton(Petrinet pn) { + + } + +} diff --git a/src/org/processmining/slpnminer/helpers/StrToExp.java b/src/org/processmining/slpnminer/helpers/StrToExp.java new file mode 100644 index 0000000..752a1f6 --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/StrToExp.java @@ -0,0 +1,276 @@ +package org.processmining.slpnminer.helpers; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Stack; + +public class StrToExp { + + static public double converStringToMathExp(String calculateString, HashMap strToDouble) { + return calculateInversePolandExpression(getInversePolandExpression(calculateString),strToDouble); + } + +// static public double converStringToMathExp(String calculateString) { +// return calculateInversePolandExpression(getInversePolandExpression2(calculateString)); +// } +// +// static public double converStringToMathExp( +// Map constantMap, String calculateString) { +// double result = 0; +// for (String str : constantMap.keySet()) { +// calculateString = 
calculateString.replaceAll(str, +// constantMap.get(str)); +// } +// result = calculateInversePolandExpression(getInversePolandExpression(calculateString)); +// return result; +// } + + + static private double calculateInversePolandExpression( + List inversePolandExpression) { + double result = 0; + Stack calculateStack = new Stack(); + for (String str : inversePolandExpression) { + if (str.equals("+") || str.equals("-") || str.equals("*") + || str.equals("/")) { + + double t1 = Double.valueOf(calculateStack.pop()); + double t2 = Double.valueOf(calculateStack.pop()); + result = simpleCalculate(t2, t1, str); + calculateStack.push(result); + } else { + calculateStack.push(Double.valueOf(str)); + } + } +// System.out.println(String.valueOf(result)); + return result; + } + + static private double calculateInversePolandExpression( + List inversePolandExpression, + HashMap strToDouble) { + + double result = 0; + Stack calculateStack = new Stack(); + for (String str : inversePolandExpression) { + + if (str.equals("+") || str.equals("-") || str.equals("*") + || str.equals("/")) { + // do the calculation for two variables. 
+ double p1 = calculateStack.pop(); + double p2 = calculateStack.pop(); + result = simpleCalculate(p2,p1,str); + calculateStack.push(result); + } else { + if(strToDouble.containsKey(str)){ + calculateStack.push(strToDouble.get(str)); + } + else{ + calculateStack.push(Double.valueOf(str)); + } + } + } + + return result; + } + + public static List getInversePolandExpression(String exp) { + + if (exp == null) + return null; + List result2 = new ArrayList<>(); + int len = exp.length(); + Stack operator = new Stack(); + Stack reversePolish = new Stack(); + //avoid checking empty + operator.push('#'); + for (int i = 0; i < len;) { + //deal with space + while (i < len && exp.charAt(i) == ' ') + i++; + if (i == len) + break; + //if is number + + if (isVar(exp.charAt(i))) { + String num = "t"; + i++; + while (i < len && isNum(exp.charAt(i))) + num += exp.charAt(i++); + reversePolish.push(num); + } + else if (isNum(exp.charAt(i))) { + String num = ""; + while (i < len && isNum(exp.charAt(i))) + num += exp.charAt(i++); + reversePolish.push(num); + } + + else if (isOperator(exp.charAt(i))) { + char op = exp.charAt(i); + switch (op) { + case '(': + operator.push(op); + break; + case ')': + while (operator.peek() != '(') + reversePolish.push(Character.toString(operator.pop())); + operator.pop(); + break; + case '+': + case '-': + if (operator.peek() == '(') + operator.push(op); + else { + while (operator.peek() != '#' && operator.peek() != '(') + reversePolish.push(Character.toString(operator.pop())); + operator.push(op); + } + break; + case '*': + case '/': + if (operator.peek() == '(') + operator.push(op); + else { + while (operator.peek() != '#' && operator.peek() != '+' && + operator.peek() != '-' && operator.peek() != '(') + reversePolish.push(Character.toString(operator.pop())); + operator.push(op); + } + break; + } + i++; + } + } + while (operator.peek() != '#') + reversePolish.push(Character.toString(operator.pop())); + while (!reversePolish.isEmpty()) { + String temp1 = 
reversePolish.pop(); + result2.add(0,temp1); + } + + return result2; + } + + public static boolean isOperator(char c) { + return c == '+' || c == '-' || c == '*' || c == '/' || c == '(' || c == ')'; + } + + public static boolean isNum(char c) { + return c - '0' >= 0 && c - '0' <= 9; + } + + public static boolean isVar(char c) { + return c == 't'; + } + + + + static private List getInversePolandExpression2( + String normalExpression) { + List inversePolandExpression = new ArrayList(); + char[] normalChararray = (normalExpression + "$").toCharArray(); + // + Stack signStack = new Stack(); + List> signStackList = new ArrayList>(); + signStackList.add(signStack); + // + int level = 0; + + int pointPosition = 0; + double tempNumber = 0; + boolean isInInteger = true; + + for (int i = 0; i < normalChararray.length; i++) { + char tempChar = normalChararray[i]; + // + if (tempChar >= '0' && tempChar <= '9') { + // + if (isInInteger) { + tempNumber = tempNumber * 10 + (int) (tempChar - 48); + } + // ? + else { + tempNumber += (double) (tempChar - 48) + * Math.pow(0.1, i - pointPosition); + } + + } + // ? + else if (tempChar == '.') { + isInInteger = false; + pointPosition = i; + } + // + else if (tempChar == '+' || tempChar == '-' || tempChar == '*' + || tempChar == '/' || tempChar == '$') { + // + isInInteger = true; + // ? + if (tempNumber > 0) { + inversePolandExpression.add(String.valueOf(tempNumber)); + } + // 0 + tempNumber = 0; + // ??? + if ((tempChar == '+') || (tempChar == '-') + || tempChar == '$') { + + while (!signStackList.get(level).isEmpty()) { + // + inversePolandExpression.add(signStackList + .get(level).pop()); + } + } + // ? + + signStackList.get(level).push(tempChar + ""); + + } else if (tempChar == '(') { + signStack = new Stack(); + signStackList.add(signStack); + level++; + } else if (tempChar == ')') { + // + isInInteger = true; + // ? 
+ if (tempNumber > 0) { + inversePolandExpression.add(String.valueOf(tempNumber)); + } + + // 0 + tempNumber = 0; + // ??? + + while (!signStackList.get(level).isEmpty()) { + // + inversePolandExpression.add(signStackList.get(level) + .pop()); + + } + level--; + } + } +// System.out.println(inversePolandExpression); + return inversePolandExpression; + } + + + static private double simpleCalculate(double x, double y, String sign) { + double result = 0; + if (sign.equals("+")) { + result = x + y; + } else if (sign.equals("-")) { + result = x - y; + } else if (sign.equals("*")) { + result = x * y; + } else if (sign.equals("/")) { + result = x / y; + } + return result; + + } + +} diff --git a/src/org/processmining/slpnminer/helpers/TimeoutTest.java b/src/org/processmining/slpnminer/helpers/TimeoutTest.java new file mode 100644 index 0000000..c718d83 --- /dev/null +++ b/src/org/processmining/slpnminer/helpers/TimeoutTest.java @@ -0,0 +1,85 @@ +package org.processmining.slpnminer.helpers; + +import java.util.concurrent.*; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.Executors; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; + +import org.matheclipse.core.basic.Config; +import org.matheclipse.core.eval.EvalEngine; +import org.matheclipse.core.eval.ExprEvaluator; +import org.matheclipse.core.expression.S; +import org.matheclipse.core.form.output.OutputFormFactory; +import org.matheclipse.core.interfaces.IExpr; + + +public class TimeoutTest { + + public static void main(String[] args) { + for(int i=0;i<5;i++) { + String rst = getIsoRes(); + if (rst==null) { + System.out.println("执行超时!"); + } + else { + System.out.println("get result: "+rst); + } + } + } + + public static String getIsoRes() { + ExecutorService executorService = Executors.newSingleThreadExecutor(); + String rst = null; + Future future = executorService.submit(() -> 
getIsolatedVar("Solve({a1==a7,a2==a28*t5/(t5+t4+t9+t8+t3)+a27*t9/(t5+t4+t9+t8+t3),a3==a28*t6/(t9+t8+t6)+a11*t9/(t9+t8+t6),a4==a11,a5==a30*t5/(t5+t4+t3+t14+t13)+a27*t14/(t5+t4+t3+t14+t13),a6==1,a7==a9*t5/(t5+t4+t9+t8+t3+t14+t13)+a21*t9/(t5+t4+t9+t8+t3+t14+t13)+a19*t14/(t5+t4+t9+t8+t3+t14+t13)+a17*t3/(t5+t4+t9+t8+t3+t14+t13),a8==a3*t3/(t9+t8+t3)+a4*t9/(t9+t8+t3),a9==a24*t9/(t9+t8+t3+t14+t13)+a8*t14/(t9+t8+t3+t14+t13)+a20*t3/(t9+t8+t3+t14+t13),a10==a13*t12/(t12+t6),a11==a22*t6/(t12+t6),a12==a30*t9/(t9+t8+t3+t14+t13)+a28*t14/(t9+t8+t3+t14+t13),a13==a6*t0/(t0+t1),a14==a1*t19/(t19+t18+t17),a15==a14,a16==a27*t6/(t5+t4+t6)+a11*t5/(t5+t4+t6),a0==a15,a17==a29*t9/(t5+t4+t9+t8+t6+t14+t13)+a18*t6/(t5+t4+t9+t8+t6+t14+t13)+a20*t5/(t5+t4+t9+t8+t6+t14+t13)+a25*t14/(t5+t4+t9+t8+t6+t14+t13),a18==a12*t5/(t5+t4+t9+t8+t3+t14+t13)+a5*t9/(t5+t4+t9+t8+t3+t14+t13)+a2*t14/(t5+t4+t9+t8+t3+t14+t13),a19==a8*t5/(t5+t4+t9+t8+t3)+a23*t9/(t5+t4+t9+t8+t3)+a25*t3/(t5+t4+t9+t8+t3),a20==a12*t6/(t9+t8+t6+t14+t13)+a26*t9/(t9+t8+t6+t14+t13)+a3*t14/(t9+t8+t6+t14+t13),a21==a23*t14/(t5+t4+t3+t14+t13)+a24*t5/(t5+t4+t3+t14+t13)+a29*t3/(t5+t4+t3+t14+t13),a22==a10,a23==a16*t3/(t5+t4+t3)+a4*t5/(t5+t4+t3),a24==a4*t14/(t3+t14+t13)+a26*t3/(t3+t14+t13),a25==a2*t6/(t5+t4+t9+t8+t6)+a3*t5/(t5+t4+t9+t8+t6)+a16*t9/(t5+t4+t9+t8+t6),a26==a30*t6/(t6+t14+t13)+a11*t14/(t6+t14+t13),a27==a22*t5/(t5+t4+t3),a28==a22*t9/(t9+t8+t3),a29==a5*t6/(t5+t4+t6+t14+t13)+a26*t5/(t5+t4+t6+t14+t13)+a16*t14/(t5+t4+t6+t14+t13),a30==a22*t14/(t3+t14+t13)},{a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16,a0,a17,a18,a19,a20,a21,a22,a23,a24,a25,a26,a27,a28,a29,a30})")); + + try { + //设置超时时间 + rst = future.get(5, TimeUnit.SECONDS); + } catch (TimeoutException e) { + } catch(Exception e){ + }finally { + executorService.shutdown(); + } + return rst; + } + + public static String getIsolatedVar(String equationSystems){ + System.out.println("eq system: "+equationSystems); + ExprEvaluator scriptEngine = new ExprEvaluator(); + String evaledResult = 
printResult(scriptEngine.eval(equationSystems)); + String isolatedVar = evaledResult.substring(evaledResult.indexOf(">")+1, + evaledResult.indexOf(",")); +// System.out.println(isolatedVar); + return isolatedVar; + } + + private static String printResult(IExpr result, boolean relaxedSyntax) { + if (result.equals(S.Null)) { + return ""; + } + final StringWriter buf = new StringWriter(); + EvalEngine engine = EvalEngine.get(); + + OutputFormFactory off; + int significantFigures = engine.getSignificantFigures(); + + off = OutputFormFactory.get(relaxedSyntax, false, significantFigures - 1, significantFigures + 1); + + if (off.convert(buf, result)) { + // print the result in the console + return buf.toString(); + } + if (Config.FUZZ_TESTING) { + throw new NullPointerException(); + } + return "ScriptEngine: ERROR-IN-OUTPUTFORM"; + } + + private static String printResult(IExpr result) { + return printResult(result, true); + } +} diff --git a/src/org/processmining/slpnminer/models/AbstractResetInhibitorNet.java b/src/org/processmining/slpnminer/models/AbstractResetInhibitorNet.java new file mode 100644 index 0000000..7084aef --- /dev/null +++ b/src/org/processmining/slpnminer/models/AbstractResetInhibitorNet.java @@ -0,0 +1,440 @@ +package org.processmining.slpnminer.models; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.processmining.models.graphbased.AttributeMap; +import org.processmining.models.graphbased.directed.AbstractDirectedGraph; +import org.processmining.models.graphbased.directed.DirectedGraph; +import org.processmining.models.graphbased.directed.DirectedGraphEdge; +import org.processmining.models.graphbased.directed.DirectedGraphElement; +import org.processmining.models.graphbased.directed.DirectedGraphNode; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import 
org.processmining.models.graphbased.directed.petrinet.PetrinetNode;
import org.processmining.models.graphbased.directed.petrinet.elements.Arc;
import org.processmining.models.graphbased.directed.petrinet.elements.ExpandableSubNet;
import org.processmining.models.graphbased.directed.petrinet.elements.InhibitorArc;
import org.processmining.models.graphbased.directed.petrinet.elements.Place;
import org.processmining.models.graphbased.directed.petrinet.elements.ResetArc;
import org.processmining.models.graphbased.directed.petrinet.elements.Transition;

// This abstract class implements Petrinet, Resetnet, InhibitorNet, but
// does not declare that. Declaration is done in subclasses, so the
// inheritance relation is not mixed (i.e. each Petrinet is a Resetnet,
// but no resetEdges can be added to a Petrinet).

// All implementing classes should decide which interfaces to implement.

// NOTE(review): the generic type parameters in this file were missing in the
// pasted source (angle brackets appear to have been stripped during
// extraction); they have been restored following ProM framework conventions.
// Verify against the upstream ProM sources.
public abstract class AbstractResetInhibitorNet extends
		AbstractDirectedGraph<PetrinetNode, PetrinetEdge<? extends PetrinetNode, ? extends PetrinetNode>> {

	protected final Set<Transition> transitions;
	protected final Set<ExpandableSubNet> substitutionTransitions;
	protected final Set<Place> places;
	protected final Set<Arc> arcs;
	protected final Set<ResetArc> resetArcs;
	protected final Set<InhibitorArc> inhibitorArcs;

	/**
	 * Creates an empty net.
	 *
	 * @param allowsReset if false, the reset-arc set is an immutable empty set,
	 *            so no reset arcs can ever be added
	 * @param allowsInhibitors if false, the inhibitor-arc set is an immutable
	 *            empty set, so no inhibitor arcs can ever be added
	 */
	public AbstractResetInhibitorNet(boolean allowsReset, boolean allowsInhibitors) {
		super();
		transitions = new LinkedHashSet<Transition>();
		substitutionTransitions = new LinkedHashSet<ExpandableSubNet>();
		places = new LinkedHashSet<Place>();
		arcs = new LinkedHashSet<Arc>();
		resetArcs = allowsReset ? new LinkedHashSet<ResetArc>() : Collections.<ResetArc>emptySet();
		inhibitorArcs = allowsInhibitors ? new LinkedHashSet<InhibitorArc>() : Collections.<InhibitorArc>emptySet();
	}

	public synchronized ResetArc addResetArc(Place p, Transition t) {
		return addResetArc(p, t, null, null);
	}

	public synchronized ResetArc addResetArc(Place p, Transition t, ExpandableSubNet parent) {
		return addResetArc(p, t, null, parent);
	}

	public synchronized ResetArc addResetArc(Place p, Transition t, String label) {
		return addResetArc(p, t, label, null);
	}

	/**
	 * Adds a reset arc p -->> t. If an equal arc already exists, only its
	 * label is refreshed (when a non-null label is given) and the existing arc
	 * is returned.
	 */
	public synchronized ResetArc addResetArc(Place p, Transition t, String label, ExpandableSubNet parent) {
		checkAddEdge(p, t);
		ResetArc a = new ResetArc(p, t, (label == null ? p.toString() + " -->> " + t.toString() : label), parent);
		if (resetArcs.add(a)) {
			graphElementAdded(a);
			return a;
		} else {
			// Arc already present: only update the label, if one was provided.
			for (ResetArc existing : resetArcs) {
				if (existing.equals(a)) {
					if (label != null) {
						existing.getAttributeMap().put(AttributeMap.LABEL, label);
					}
					return existing;
				}
			}
		}
		assert (false);
		return null;
	}

	public synchronized ResetArc removeResetArc(Place p, Transition t) {
		return removeFromEdges(p, t, resetArcs);
	}

	public synchronized InhibitorArc addInhibitorArc(Place p, Transition t, String label) {
		return addInhibitorArc(p, t, label, null);
	}

	/**
	 * Adds an inhibitor arc p ---O t. If an equal arc already exists, only its
	 * label is refreshed (when a non-null label is given) and the existing arc
	 * is returned.
	 */
	public synchronized InhibitorArc addInhibitorArc(Place p, Transition t, String label, ExpandableSubNet parent) {
		checkAddEdge(p, t);
		InhibitorArc a = new InhibitorArc(p, t, (label == null ? p.toString() + " ---O " + t.toString() : label),
				parent);
		if (inhibitorArcs.add(a)) {
			graphElementAdded(a);
			return a;
		} else {
			// Arc already present: only update the label, if one was provided.
			for (InhibitorArc existing : inhibitorArcs) {
				if (existing.equals(a)) {
					if (label != null) {
						existing.getAttributeMap().put(AttributeMap.LABEL, label);
					}
					return existing;
				}
			}
		}
		assert (false);
		return null;
	}

	public synchronized InhibitorArc addInhibitorArc(Place p, Transition t) {
		return addInhibitorArc(p, t, null, null);
	}

	public synchronized InhibitorArc addInhibitorArc(Place p, Transition t, ExpandableSubNet parent) {
		return addInhibitorArc(p, t, null, parent);
	}

	public synchronized InhibitorArc removeInhibitorArc(Place p, Transition t) {
		return removeFromEdges(p, t, inhibitorArcs);
	}

	/** @return the inhibitor arc from p to t, or null if none exists. */
	public synchronized InhibitorArc getInhibitorArc(Place p, Transition t) {
		Collection<InhibitorArc> set = getEdges(p, t, inhibitorArcs);
		return (set.isEmpty() ? null : set.iterator().next());
	}

	/** @return the reset arc from p to t, or null if none exists. */
	public synchronized ResetArc getResetArc(Place p, Transition t) {
		Collection<ResetArc> set = getEdges(p, t, resetArcs);
		return (set.isEmpty() ? null : set.iterator().next());
	}

	public synchronized Transition addTransition(String label) {
		return addTransition(label, null);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.processmining.models.graphbased.petrinet.Petrinet#addTransition(java
	 * .lang.String)
	 */
	public synchronized Transition addTransition(String label, ExpandableSubNet parent) {
		Transition t = new Transition(label, this, parent);
		transitions.add(t);
		graphElementAdded(t);
		return t;
	}

	public synchronized ExpandableSubNet addGroup(String label) {
		return addGroup(label, null);
	}

	/** Adds an expandable sub-net (group) node to the net. */
	public synchronized ExpandableSubNet addGroup(String label, ExpandableSubNet parent) {
		ExpandableSubNet t = new ExpandableSubNet(label, this, parent);
		substitutionTransitions.add(t);
		graphElementAdded(t);
		return t;
	}

	public synchronized Place addPlace(String label) {
		return addPlace(label, null);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * org.processmining.models.graphbased.petrinet.Petrinet#addPlace(java.lang
	 * .String)
	 */
	public synchronized Place addPlace(String label, ExpandableSubNet parent) {
		Place p = new Place(label, this, parent);
		places.add(p);
		graphElementAdded(p);
		return p;
	}

	//**************************************************************************
	// Adding regular arcs p -> t

	/**
	 * Adds a weighted arc between two nodes. If an equal arc already exists,
	 * its weight is increased by {@code weight} instead of adding a parallel
	 * arc, and the existing arc is returned.
	 */
	protected synchronized Arc addArcPrivate(PetrinetNode src, PetrinetNode trgt, int weight, ExpandableSubNet parent) {
		synchronized (arcs) {
			checkAddEdge(src, trgt);
			Arc a = new Arc(src, trgt, weight, parent);
			if (arcs.add(a)) {
				graphElementAdded(a);
				return a;
			} else {
				for (Arc existing : arcs) {
					if (existing.equals(a)) {
						existing.setWeight(existing.getWeight() + weight);
						return existing;
					}
				}
			}
			assert (false);
			return null;
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Place,
	 * org.processmining.models.graphbased.petrinet.Transition, int)
	 */
	public synchronized Arc addArc(Place p, Transition t, int weight) {
		return addArcPrivate(p, t, weight, null);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Place,
	 * org.processmining.models.graphbased.petrinet.Transition)
	 */
	public synchronized Arc addArc(Place p, Transition t) {
		return addArc(p, t, 1);
	}

	//**************************************************************************
	// Adding regular arcs t -> p

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Transition,
	 * org.processmining.models.graphbased.petrinet.Place, int)
	 */
	public synchronized Arc addArc(Transition t, Place p, int weight) {
		return addArcPrivate(t, p, weight, null);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Transition,
	 * org.processmining.models.graphbased.petrinet.Place)
	 */
	public synchronized Arc addArc(Transition t, Place p) {
		return addArc(t, p, 1);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Place,
	 * org.processmining.models.graphbased.petrinet.Transition, int)
	 */
	public synchronized Arc addArc(Place p, Transition t, int weight, ExpandableSubNet parent) {
		return addArcPrivate(p, t, weight, parent);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Place,
	 * org.processmining.models.graphbased.petrinet.Transition)
	 */
	public synchronized Arc addArc(Place p, Transition t, ExpandableSubNet parent) {
		return addArc(p, t, 1, parent);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Transition,
	 * org.processmining.models.graphbased.petrinet.Place, int)
	 */
	public synchronized Arc addArc(Transition t, Place p, int weight, ExpandableSubNet parent) {
		return addArcPrivate(t, p, weight, parent);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.processmining.models.graphbased.petrinet.Petrinet#addArc(org.
	 * processmining.models.graphbased.petrinet.Transition,
	 * org.processmining.models.graphbased.petrinet.Place)
	 */
	public synchronized Arc addArc(Transition t, Place p, ExpandableSubNet parent) {
		return addArc(t, p, 1, parent);
	}

	/** @return the regular arc from source to target, or null if none exists. */
	public synchronized Arc getArc(PetrinetNode source, PetrinetNode target) {
		Collection<Arc> set = getEdges(source, target, arcs);
		return (set.isEmpty() ? null : set.iterator().next());
	}

	public synchronized Arc removeArc(PetrinetNode source, PetrinetNode target) {
		Arc a = removeFromEdges(source, target, arcs);
		return a;
	}

	/** Removes an edge of any kind (regular, reset, or inhibitor). */
	@SuppressWarnings("unchecked")
	public synchronized void removeEdge(DirectedGraphEdge edge) {
		if (edge instanceof InhibitorArc) {
			inhibitorArcs.remove(edge);
		} else if (edge instanceof ResetArc) {
			resetArcs.remove(edge);
		} else if (edge instanceof Arc) {
			arcs.remove(edge);
		} else {
			assert (false);
		}
		graphElementRemoved(edge);
	}

	/** Removes a node (place or transition) together with its surrounding edges. */
	public synchronized void removeNode(DirectedGraphNode node) {
		if (node instanceof Transition) {
			removeTransition((Transition) node);
		} else if (node instanceof Place) {
			removePlace((Place) node);
		} else {
			assert (false);
		}
	}

	public synchronized Place removePlace(Place place) {
		removeSurroundingEdges(place);
		return removeNodeFromCollection(places, place);
	}

	public synchronized Transition removeTransition(Transition transition) {
		removeSurroundingEdges(transition);
		return removeNodeFromCollection(transitions, transition);
	}

	public synchronized ExpandableSubNet removeGroup(ExpandableSubNet transition) {
		removeSurroundingEdges(transition);
		return removeNodeFromCollection(substitutionTransitions, transition);
	}

	public synchronized Set<PetrinetNode> getNodes() {
		Set<PetrinetNode> nodes = new HashSet<PetrinetNode>();
		nodes.addAll(transitions);
		nodes.addAll(places);
		nodes.addAll(substitutionTransitions);
		return nodes;
	}

	public synchronized Set<PetrinetEdge<? extends PetrinetNode, ? extends PetrinetNode>> getEdges() {
		Set<PetrinetEdge<? extends PetrinetNode, ? extends PetrinetNode>> edges =
				new HashSet<PetrinetEdge<? extends PetrinetNode, ? extends PetrinetNode>>();
		edges.addAll(arcs);
		edges.addAll(inhibitorArcs);
		edges.addAll(resetArcs);
		return edges;
	}

	// It's safe to assume that the input is an AbstractResetInhibitorNet.
	protected synchronized Map<DirectedGraphElement, DirectedGraphElement> cloneFrom(
			DirectedGraph<PetrinetNode, PetrinetEdge<? extends PetrinetNode, ? extends PetrinetNode>> graph) {
		AbstractResetInhibitorNet net = (AbstractResetInhibitorNet) graph;
		return cloneFrom(net, true, true, true, true, true);
	}

	// It's safe to assume that the input is an AbstractResetInhibitorNet.
	/**
	 * Copies the selected element kinds of {@code net} into this net and
	 * returns a mapping from the original elements to their copies. Also
	 * replaces this net's attribute map with a copy of {@code net}'s.
	 */
	protected synchronized Map<DirectedGraphElement, DirectedGraphElement> cloneFrom(AbstractResetInhibitorNet net,
			boolean transitions, boolean places, boolean arcs, boolean resets, boolean inhibitors) {

		HashMap<DirectedGraphElement, DirectedGraphElement> mapping =
				new HashMap<DirectedGraphElement, DirectedGraphElement>();

		if (transitions) {
			for (Transition t : net.transitions) {
				Transition copy = addTransition(t.getLabel());
				copy.setInvisible(t.isInvisible());
				mapping.put(t, copy);
			}
		}
		if (places) {
			for (Place p : net.places) {
				Place copy = addPlace(p.getLabel());
				mapping.put(p, copy);
			}
		}
		if (arcs) {
			// Arc endpoints are resolved through the node mapping built above.
			for (Arc a : net.arcs) {
				mapping.put(a, addArcPrivate((PetrinetNode) mapping.get(a.getSource()), (PetrinetNode) mapping.get(a
						.getTarget()), a.getWeight(), a.getParent()));
			}
		}
		if (inhibitors) {
			for (InhibitorArc a : net.inhibitorArcs) {
				mapping.put(a, addInhibitorArc((Place) mapping.get(a.getSource()), (Transition) mapping.get(a
						.getTarget()), a.getLabel()));
			}
		}
		if (resets) {
			for (ResetArc a : net.resetArcs) {
				mapping.put(a, addResetArc((Place) mapping.get(a.getSource()), (Transition) mapping.get(a.getTarget()),
						a.getLabel()));
			}
		}

		getAttributeMap().clear();
		AttributeMap map = net.getAttributeMap();
		for (String key : map.keySet()) {
			getAttributeMap().put(key, map.get(key));
		}

		return mapping;
	}

	public synchronized Collection<Place> getPlaces() {
		return Collections.unmodifiableCollection(places);
	}

	public synchronized Collection<Transition> getTransitions() {
		return Collections.unmodifiableCollection(transitions);
	}

	public synchronized Collection<ExpandableSubNet> getGroups() {
		return Collections.unmodifiableCollection(substitutionTransitions);
	}

}
diff --git a/src/org/processmining/slpnminer/models/AbstractResetInhibitorNetSemantics.java
b/src/org/processmining/slpnminer/models/AbstractResetInhibitorNetSemantics.java new file mode 100644 index 0000000..f24148e --- /dev/null +++ b/src/org/processmining/slpnminer/models/AbstractResetInhibitorNetSemantics.java @@ -0,0 +1,149 @@ +package org.processmining.slpnminer.models; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; + +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.elements.Arc; +import org.processmining.models.graphbased.directed.petrinet.elements.InhibitorArc; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.ResetArc; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetExecutionInformation; + +abstract class AbstractResetInhibitorNetSemantics { + + protected Marking state; + private Collection transitions; + + public AbstractResetInhibitorNetSemantics() { + this(null); + } + + public AbstractResetInhibitorNetSemantics(Marking state) { + this.state = state; + + } + + public void initialize(Collection transitions, Marking state) { + this.transitions = transitions; + setCurrentState(state); + } + + protected Collection getTransitions() { + return Collections.unmodifiableCollection(transitions); + } + + protected boolean isEnabled(Marking state, Marking required, Transition t) { + if (required.isLessOrEqual(state)) { + for (PetrinetEdge e : t.getGraph().getInEdges(t)) { + if (e instanceof InhibitorArc) { + InhibitorArc arc = (InhibitorArc) e; + if (state.occurrences(arc.getSource()) > 0) { + return false; + } + } + } + return 
true; + } + return false; + } + + public Marking getCurrentState() { + return state; + } + + public void setCurrentState(Marking currentState) { + state = currentState; + } + + public PetrinetExecutionInformation executeExecutableTransition(Transition toExecute) + throws IllegalTransitionException { + Marking required = getRequired(toExecute); + Marking newState = new Marking(state); + if (!isEnabled(state, required, toExecute)) { + throw new IllegalTransitionException(toExecute, newState); + } + Marking produced = getProduced(toExecute); + newState.addAll(produced); + Marking toRemove = getRemoved(toExecute); + newState.removeAll(toRemove); + state = newState; + return new PetrinetExecutionInformation(required, toRemove, produced, toExecute); + } + + public Collection getExecutableTransitions() { + if (state == null) { + return null; + } + // the tokens are divided over the places according to state + ArrayList enabled = new ArrayList(); + for (Transition trans : getTransitions()) { + if (isEnabled(state, getRequired(trans), trans)) { + enabled.add(trans); + } + } + return enabled; + } + + protected Marking getRequired(Transition trans) { + Collection> edges = trans.getGraph().getInEdges( + trans); + Marking required = new Marking(); + for (PetrinetEdge e : edges) { + if (e instanceof Arc) { + Arc arc = (Arc) e; + required.add((Place) arc.getSource(), arc.getWeight()); + } + } + return required; + + } + + protected Marking getProduced(Transition trans) { + Collection> edges = trans.getGraph().getOutEdges( + trans); + Marking produced = new Marking(); + for (PetrinetEdge e : edges) { + if (e instanceof Arc) { + Arc arc = (Arc) e; + produced.add((Place) arc.getTarget(), arc.getWeight()); + } + } + + return produced; + + } + + protected Marking getRemoved(Transition trans) { + Collection> edges = trans.getGraph().getInEdges( + trans); + Marking removed = new Marking(getRequired(trans)); + for (PetrinetEdge e : edges) { + if (e instanceof ResetArc) { + ResetArc arc = 
(ResetArc) e; + removed.add(arc.getSource(), state.occurrences(arc.getSource())); + } + } + return removed; + } + + public String toString() { + return "Regular Semantics"; + } + + public int hashCode() { + return getClass().hashCode(); + } + + public boolean equals(Object o) { + if (o == null) { + return false; + } + return this.getClass().equals(o.getClass()); + } +} diff --git a/src/org/processmining/slpnminer/models/AttributeMap.java b/src/org/processmining/slpnminer/models/AttributeMap.java new file mode 100644 index 0000000..ed8059a --- /dev/null +++ b/src/org/processmining/slpnminer/models/AttributeMap.java @@ -0,0 +1,165 @@ +package org.processmining.slpnminer.models; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; + +public class AttributeMap { + private final static String PREFIX = "TransitionSystem_"; + + public enum ArrowType { + ARROWTYPE_CLASSIC(PREFIX + "arrow_classic"), // + ARROWTYPE_TECHNICAL(PREFIX + "arrow_tech"), // + ARROWTYPE_SIMPLE(PREFIX + "arrow_simple"), // + ARROWTYPE_DIAMOND(PREFIX + "arrow_diamond"), // + ARROWTYPE_CIRCLE(PREFIX + "arrow_circle"), // + ARROWTYPE_LINE(PREFIX + "arrow_line"), // + ARROWTYPE_DOUBLELINE(PREFIX + "arrow_double"), // + ARROWTYPE_NONE(PREFIX + "arrow_none"), // + ARROW_CROSS(PREFIX + "arrow_cross"), // + ARROW_TECHNICAL_CIRCLE(PREFIX + "arrow_technical_circle"); + + ArrowType(String s) { + } + } + + public final static String SHAPE = PREFIX + "shape"; + public static final String SHAPEDECORATOR = PREFIX + "ShapeDecorator"; + + public final static String FILLCOLOR = PREFIX + "fillcolor"; + public final static String GRADIENTCOLOR = PREFIX + "gradientcolor"; + public final static String ICON = PREFIX + "icon"; + public final static String BORDERWIDTH = PREFIX + "border"; + public final static String LABEL = PREFIX + "label"; + + public final static String TOOLTIP = PREFIX + "tooltip"; + + public final static String EDGESTART = PREFIX + "edgestart"; + public final static String 
EDGESTARTFILLED = PREFIX + "edgeStartFilled"; + + public final static String EDGEEND = PREFIX + "edge end"; + public final static String EDGEENDFILLED = PREFIX + "edgeEndFilled"; + + public final static String EDGEMIDDLE = PREFIX + "edge middle"; + public final static String EDGEMIDDLEFILLED = PREFIX + "edgeMiddleFilled"; + + public final static String LABELVERTICALALIGNMENT = PREFIX + "labelVerticalAlignment"; + public final static String EDGECOLOR = PREFIX + "edgeColor"; // added by arya + public final static String STROKECOLOR = PREFIX + "strokeColor"; // added by arya + + public final static String INSET = PREFIX + "inset"; // added by jribeiro + public final static String STROKE = PREFIX + "stroke"; // added by jribeiro + public final static String DASHPATTERN = PREFIX + "dashPattern"; // added by jribeiro + public final static String DASHOFFSET = PREFIX + "dashOffset"; // added by jribeiro + public final static String LABELCOLOR = PREFIX + "labelColor"; // added by jribeiro + public final static String LABELALONGEDGE = PREFIX + "labelAlongEdge"; // added by jribeiro + + /** + * A Float representing the linewidth of a line. + */ + public final static String LINEWIDTH = PREFIX + "lineWidth"; + public final static String NUMLINES = PREFIX + "numLines"; + + public final static String STYLE = PREFIX + "style"; + + public final static String POLYGON_POINTS = PREFIX + "polygonpoints"; + + public static final String SQUAREBB = PREFIX + "squareBB"; + public static final String RESIZABLE = PREFIX + "resizable"; + public static final String AUTOSIZE = PREFIX + "autosize"; + public static final String SHOWLABEL = PREFIX + "showLabel"; + public static final String MOVEABLE = PREFIX + "movable"; // added by arya + + /** + * This should be set to SwingConstants.SOUTH, SwingConstants.WEST and so + * on. 
SwingConstants.NORTH means the graph prefers drawn Top-Down + * SwingConstants.WEST means the graph prefers drawn Left to Right + */ + public static final String PREF_ORIENTATION = PREFIX + "orientation"; + public static final String LABELHORIZONTALALIGNMENT = PREFIX + "horizontal alignment"; + public static final String SIZE = "size"; + public static final String PORTOFFSET = "portoffset"; + + /** + * The value of this attribute should be an array of type Point2D[] + * (size>0), as used in the method GraphConstants.setExtraLabelPositions() + */ + public static final String EXTRALABELPOSITIONS = "Label positions"; + /** + * The value of this attribute should be an array of type String[] (size>0), + * as used in the method GraphConstants.setExtraLabels() + */ + public static final String EXTRALABELS = "Extra Labels"; + + /** + * Renderer to be used. + */ + public static final String RENDERER = "Renderer"; + + private final Map mapping = new LinkedHashMap(); + + public AttributeMap() { + } + + public Object get(String key) { + return mapping.get(key); + } + + @SuppressWarnings("unchecked") + public T get(String key, T defaultValue) { + synchronized (mapping) { + Object o = mapping.get(key); + if (o != null) { + return (T) o; + } + if (mapping.containsKey(key)) { + return null; + } else { + return defaultValue; + } + } + } + + public void clear() { + mapping.clear(); + } + + public Set keySet() { + return mapping.keySet(); + } + + /** + * This method updates the map and signals the owner. 
The origin is passed + * in this update, to make sure that no unnecessary updates are performed + * + * @param key + * @param value + * @param origin + * @return + */ + public boolean put(String key, Object value) { + Object old; + synchronized (mapping) { + old = mapping.get(key); + mapping.put(key, value); + } + if (value == old) { + return false; + } + if ((value == null) || (old == null) || !value.equals(old)) { + return true; + } + return false; + } + + public void remove(String key) { + synchronized (mapping) { + mapping.remove(key); + } + } + + public boolean containsKey(String key) { + return mapping.containsKey(key); + } + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/BoundednessAnalyzer.java b/src/org/processmining/slpnminer/models/BoundednessAnalyzer.java new file mode 100644 index 0000000..68d2bf2 --- /dev/null +++ b/src/org/processmining/slpnminer/models/BoundednessAnalyzer.java @@ -0,0 +1,263 @@ +package org.processmining.slpnminer.models; + +import org.processmining.framework.connections.ConnectionCannotBeObtained; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.framework.util.collection.MultiSet; +import org.processmining.framework.util.collection.TreeMultiSet; +import org.processmining.models.connections.petrinets.behavioral.*; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.analysis.CoverabilitySet; +import org.processmining.models.graphbased.directed.petrinet.analysis.NetAnalysisInformation; +import org.processmining.models.graphbased.directed.petrinet.analysis.UnboundedPlacesSet; +import 
org.processmining.models.graphbased.directed.petrinet.analysis.UnboundedSequences; +import org.processmining.models.graphbased.directed.petrinet.analysis.NetAnalysisInformation.UnDetBool; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.semantics.petrinet.CTMarking; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetSemantics; +import org.processmining.models.semantics.petrinet.impl.PetrinetSemanticsFactory; +import org.processmining.slpnminer.models.reachabilitygraph.CoverabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.State; + +import java.util.*; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; + + +/** + * Class to analyze whether a given PetriNet is bounded Based on Murata, Tadao. + * Petri Nets:Properties, Analysis, and Applications. Proceedings of the IEEE + * vol. 
77, No.4, April 1989 a net is bounded iff omega notation does not appear
+ * in any node label in the coverability graph
+ *
+ * @author arya
+ * @email arya.adriansyah@gmail.com
+ * @version Dec 13, 2008
+ */
+@Plugin(name = "Analyze Boundedness", returnLabels = { "Boundedness info", "Unbounded Places", "Unbounded Sequences" }, returnTypes = {
+		NetAnalysisInformation.BOUNDEDNESS.class, UnboundedPlacesSet.class, UnboundedSequences.class }, parameterLabels = {
+		"Net", "Marking", "Coverability Set", "Semantics" }, userAccessible = true)
+public class BoundednessAnalyzer {
+
+	// NOTE(review): generic type parameters appear to have been stripped from this
+	// patch (e.g. "Iterator it", "new TreeSet()") -- verify against the original source.
+
+	// Variant with only a Petri net as input: the initial marking is looked up
+	// (or constructed) through the net's InitialMarkingConnection.
+	@PluginVariant(variantLabel = "Analyze Boundedness", requiredParameterLabels = { 0 })
+	public Object[] analyzeBoundednessPetriNet(PluginContext context, Petrinet net) throws ConnectionCannotBeObtained {
+		Marking state = context.tryToFindOrConstructFirstObject(Marking.class, InitialMarkingConnection.class,
+				InitialMarkingConnection.MARKING, net);
+		return analyzeBoundednessPetriNet(context, net, state);
+	}
+
+	// Variant with only a Petri net and a marking as input: uses the regular
+	// (default) Petri net firing semantics.
+	@PluginVariant(variantLabel = "Analyze Boundedness", requiredParameterLabels = { 0, 1 })
+	public Object[] analyzeBoundednessPetriNet(PluginContext context, Petrinet net, Marking state)
+			throws ConnectionCannotBeObtained {
+		return analyzeBoundednessPetriNet(context, net, state, PetrinetSemanticsFactory
+				.regularPetrinetSemantics(Petrinet.class));
+	}
+
+	// Variant with a Petri net, a marking, and a semantics as input: finds (or
+	// constructs) the coverability set for this net/marking/semantics combination.
+	@PluginVariant(variantLabel = "Analyze Boundedness", requiredParameterLabels = { 0, 1, 3 })
+	public Object[] analyzeBoundednessPetriNet(PluginContext context, Petrinet net, Marking state,
+			PetrinetSemantics semantics) throws ConnectionCannotBeObtained {
+		// Throws ConnectionCannotBeObtained if the marking is not an initial marking of this net.
+		context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, state);
+		semantics.initialize(net.getTransitions(), new Marking(state));
+
+		// check if there is already generated coverability set
+		CoverabilitySet coverabilitySet = null;
+		coverabilitySet = context.tryToFindOrConstructFirstObject(CoverabilitySet.class,
+				CoverabilitySetConnection.class, CoverabilitySetConnection.MARKINGS, net, state, semantics);
+
+		return analyzeBoundednessPetriNet(context, net, state, coverabilitySet, semantics);
+	}
+
+	// Variant with a Petri net, a marking, and a coverability set as input.
+	@PluginVariant(variantLabel = "Analyze Boundedness", requiredParameterLabels = { 0, 1, 2 })
+	public Object[] analyzeBoundednessPetriNet(PluginContext context, Petrinet net, Marking state,
+			CoverabilitySet covSet, PetrinetSemantics semantics) throws ConnectionCannotBeObtained {
+		return analyzeBoundednessPetriNetInternal(context, net, state, covSet, semantics);
+	}
+
+	/**
+	 * Analyze boundedness of a petri net
+	 *
+	 * @param context
+	 *            Context of the petri net
+	 * @param net
+	 *            net to be analyzed
+	 * @param state
+	 *            Initial state (initial marking)
+	 * @param covSet
+	 *            Coverability set of this petri net and marking
+	 * @param semantics
+	 *            Semantic of this petri net
+	 * @return An array of three objects: 1. type NetAnalysisInformation, info
+	 *         about boundedness of the net 2. type UnboundedPlacesSet, contains
+	 *         set of set of unbounded places 3. type UnboundedSequences,
+	 *         contains set of unbounded sequences
+	 * @throws ConnectionCannotBeObtained
+	 *             if the coverability set is not connected to this net/marking
+	 */
+	private Object[] analyzeBoundednessPetriNetInternal(PluginContext context, PetrinetGraph net, Marking state,
+			CoverabilitySet covSet, PetrinetSemantics semantics) throws ConnectionCannotBeObtained {
+		// check connection between coverability graph, net, and marking
+		context.getConnectionManager().getFirstConnection(MarkingsetNetConnection.class, context, net, covSet,
+				semantics);
+
+		semantics.initialize(net.getTransitions(), state);
+		Object[] result = analyzeBoundednessAssumingConnection(context, net, state, covSet, semantics);
+		// Publish the three results to the framework so other plugins can find them.
+		context.addConnection(new BoundednessInfoConnection(net, state, semantics, (NetAnalysisInformation.BOUNDEDNESS) result[0]));
+		context.addConnection(new UnboundedPlacesConnection(net, (UnboundedPlacesSet) result[1], state, semantics));
+		context.addConnection(new UnboundedSequencesConnection(net, (UnboundedSequences) result[2], state, semantics));
+		context.getFutureResult(0).setLabel("Boundedness Analysis of " + net.getLabel());
+		context.getFutureResult(1).setLabel("Unbounded Places of " + net.getLabel());
+		context.getFutureResult(2).setLabel("Unbounded Sequences of " + net.getLabel());
+		return result;
+
+	}
+
+	/**
+	 * Static method to check boundedness, given a coverability graph
+	 *
+	 * @param graph
+	 *            Coverability graph
+	 * @return true if the net is bounded, false if it is not
+	 */
+	public static boolean isBounded(CoverabilityGraph graph) {
+		boolean boundedness = true;
+		Iterator it = graph.getStates().iterator();
+		// Bounded iff no state of the coverability graph contains an omega place.
+		while (boundedness && it.hasNext()) {
+			CTMarking mark = (CTMarking) it.next();
+			if (!mark.getOmegaPlaces().isEmpty()) {
+				boundedness = false;
+			}
+		}
+		return boundedness;
+	}
+
+	/**
+	 * Analyze boundedness without further checking of connection
+	 *
+	 * @param net
+	 *            net to be analyzed
+	 * @param state
+	 *            Initial state (initial marking)
+	 * @param covSet
+	 *            Coverability set
+	 * @return NetAnalysisInformation about boundedness of this net
+	 * @throws ConnectionCannotBeObtained
+	 *             if no coverability graph can be found or constructed
+	 */
+	private Object[] analyzeBoundednessAssumingConnection(PluginContext context, PetrinetGraph net, Marking state,
+			CoverabilitySet covSet, PetrinetSemantics semantics) throws ConnectionCannotBeObtained {
+
+		// if there is an omega in coverability graph, the graph is not bounded
+		boolean boundedness = true;
+		SortedSet unboundedPlaces = new TreeSet();
+		int bound = 0;
+		Iterator it = covSet.iterator();
+		while (it.hasNext()) {
+			CTMarking mark = it.next();
+			if (!mark.getOmegaPlaces().isEmpty()) {
+				boundedness = false;
+				unboundedPlaces.addAll(mark.getOmegaPlaces());
+			} else {
+				// Track the largest token count seen in any bounded marking.
+				// NOTE(review): "bound" is computed but never returned -- verify intent.
+				for (Place p : mark) {
+					bound = Math.max(bound, mark.occurrences(p));
+				}
+			}
+		}
+
+		NetAnalysisInformation.BOUNDEDNESS boundednessRes = new NetAnalysisInformation.BOUNDEDNESS();
+		UnboundedPlacesSet result2 = new UnboundedPlacesSet();
+		result2.add(unboundedPlaces);
+		UnboundedSequences result3;
+
+		if (boundedness) {
+			boundednessRes.setValue(UnDetBool.TRUE);
+			result3 = new UnboundedSequences();
+		} else {
+			boundednessRes.setValue(UnDetBool.FALSE);
+			// Unbounded: need the coverability graph to derive witness sequences.
+			CoverabilityGraph cg = null;
+			cg = context.tryToFindOrConstructFirstObject(CoverabilityGraph.class, CoverabilityGraphConnection.class,
+					CoverabilityGraphConnection.STATEPACE, net, state, semantics);
+
+			result3 = getUnboundedSequences(net, state, cg);
+		}
+		// add connection
+		return new Object[] { boundednessRes, result2, result3 };
+	}
+
+	// Derives the transition sequences that witness unboundedness by 3-coloring
+	// the coverability graph (green = safe, red = leads to omega, yellow = border).
+	private UnboundedSequences getUnboundedSequences(PetrinetGraph net, Marking initialState,
+			CoverabilityGraph coverabilityGraph) {
+		UnboundedSequences sequences = new UnboundedSequences();
+		Collection greenStates = new HashSet();
+		Collection yellowStates = new HashSet();
+		Collection redStates = new HashSet();
+
+		/**
+		 * First, color all states green.
+		 */
+		greenStates.addAll(coverabilityGraph.getNodes());
+		/**
+		 * Second, color all unbounded states and their predecessors red.
+		 */
+		for (State state : coverabilityGraph.getNodes()) {
+			CTMarking marking = (CTMarking) state.getIdentifier();
+			if (marking.hasOmegaPlace()) {
+				colorBackwards(coverabilityGraph, state, redStates, greenStates, initialState);
+			}
+		}
+		/**
+		 * Third, color all red predecessors of green states yellow.
+		 */
+		for (org.processmining.slpnminer.models.reachabilitygraph.Transition edge : coverabilityGraph
+				.getEdges()) {
+			if (greenStates.contains(edge.getTarget())) {
+				colorBackwards(coverabilityGraph, edge.getSource(), yellowStates, redStates, initialState);
+			}
+		}
+
+		if (yellowStates.isEmpty()) {
+			/**
+			 * No yellow states, hence no green state, hence unboundedness
+			 * inevitable. Put all transitions in the sequence to visualize
+			 * this.
+			 */
+			MultiSet sequence = new TreeMultiSet();
+			sequence.addAll(net.getTransitions());
+			sequences.add(sequence);
+		} else {
+			// Each yellow->red edge is a point of no return; record its source
+			// marking plus the fired transition as a witness sequence.
+			for (org.processmining.slpnminer.models.reachabilitygraph.Transition edge : coverabilityGraph
+					.getEdges()) {
+				if (yellowStates.contains(edge.getSource()) && redStates.contains(edge.getTarget())) {
+					MultiSet sequence = new TreeMultiSet();
+					sequence.addAll((Marking) edge.getSource().getIdentifier());
+					sequence.add((Transition) edge.getIdentifier());
+					sequences.add(sequence);
+				}
+			}
+		}
+
+		return sequences;
+	}
+
+	// Moves `state` (and, transitively, its predecessors) from oldCollection to
+	// newCollection, stopping at the initial marking. Recursive; depth is bounded
+	// by the coverability graph size.
+	private void colorBackwards(CoverabilityGraph graph, State state, Collection newCollection,
+			Collection oldCollection, Marking initialState) {
+		if (oldCollection.contains(state)) {
+			oldCollection.remove(state);
+			newCollection.add(state);
+			if (((Marking) state.getIdentifier()).compareTo(initialState) != 0) {
+				for (org.processmining.slpnminer.models.reachabilitygraph.Transition edge : graph
+						.getInEdges(state)) {
+					colorBackwards(graph, edge.getSource(), newCollection, oldCollection, initialState);
+				}
+			}
+		}
+	}
+
+}
diff --git
a/src/org/processmining/slpnminer/models/GraphVisualiserAlgorithm.java b/src/org/processmining/slpnminer/models/GraphVisualiserAlgorithm.java
new file mode 100644
index 0000000..668d4ce
--- /dev/null
+++ b/src/org/processmining/slpnminer/models/GraphVisualiserAlgorithm.java
@@ -0,0 +1,343 @@
+package org.processmining.slpnminer.models;
+
+
+import java.awt.Color;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.swing.JComponent;
+
+import org.processmining.contexts.uitopia.UIPluginContext;
+import org.processmining.framework.plugin.PluginContext;
+import org.processmining.slpnminer.parameters.GraphVisualiserParameters;
+import org.processmining.models.graphbased.AttributeMap;
+import org.processmining.models.graphbased.AttributeMap.ArrowType;
+import org.processmining.models.graphbased.AttributeMapOwner;
+import org.processmining.models.graphbased.ViewSpecificAttributeMap;
+import org.processmining.models.graphbased.directed.DirectedGraph;
+import org.processmining.models.graphbased.directed.DirectedGraphEdge;
+import org.processmining.models.graphbased.directed.DirectedGraphNode;
+import org.processmining.models.shapes.Diamond;
+import org.processmining.models.shapes.Ellipse;
+import org.processmining.models.shapes.Hexagon;
+import org.processmining.models.shapes.Octagon;
+import org.processmining.models.shapes.Polygon;
+import org.processmining.models.shapes.Rectangle;
+import org.processmining.models.shapes.RoundedRect;
+import org.processmining.models.shapes.Shape;
+import org.processmining.plugins.graphviz.colourMaps.ColourMap;
+import org.processmining.plugins.graphviz.dot.Dot;
+import org.processmining.plugins.graphviz.dot.DotEdge;
+import org.processmining.plugins.graphviz.dot.DotNode;
+import org.processmining.plugins.graphviz.visualisation.DotPanel;
+
+// Translates a ProM JGraph-based directed graph into a Graphviz Dot model and
+// wraps it in a DotPanel. The many apply(...) overloads only supply defaults
+// for the attribute map and the parameters; the real work happens in the
+// four-argument overload and the two private apply(...) helpers.
+//
+// NOTE(review): generic type parameters were stripped from this patch
+// (e.g. "DirectedGraph> graph", "Map nodeMap = new HashMap()") -- verify
+// against the original source file.
+public class GraphVisualiserAlgorithm {
+
+	// Attribute key under which a node's place label is stored (used by %p/%s formats).
+	public static String GVPLACELABEL = "[GV]PlaceLabel";
+
+	/**
+	 * Create a JComponent using dot from the given graph.
+	 *
+	 * @param context
+	 *            The plug-in context. Not that relevant.
+	 * @param graph
+	 *            The graph to visualize using dot.
+	 * @return The JComponent containing the dot visualization of the graph.
+	 */
+	public JComponent apply(UIPluginContext context,
+			DirectedGraph> graph) {
+		return apply(((PluginContext) context), graph);
+
+	}
+
+	public JComponent apply(PluginContext context,
+			DirectedGraph> graph) {
+		return apply(context, graph, new ViewSpecificAttributeMap(), new GraphVisualiserParameters());
+
+	}
+
+	public JComponent apply(UIPluginContext context,
+			DirectedGraph> graph,
+			GraphVisualiserParameters parameters) {
+		return apply(((PluginContext) context), graph, parameters);
+
+	}
+
+	public JComponent apply(PluginContext context,
+			DirectedGraph> graph,
+			GraphVisualiserParameters parameters) {
+		return apply(context, graph, new ViewSpecificAttributeMap(), parameters);
+
+	}
+
+	public JComponent apply(UIPluginContext context,
+			DirectedGraph> graph,
+			ViewSpecificAttributeMap map) {
+		return apply(((PluginContext) context), graph, map);
+	}
+
+	public JComponent apply(PluginContext context,
+			DirectedGraph> graph,
+			ViewSpecificAttributeMap map) {
+		return apply(context, graph, map, new GraphVisualiserParameters());
+	}
+
+	public JComponent apply(UIPluginContext context,
+			DirectedGraph> graph,
+			ViewSpecificAttributeMap map, GraphVisualiserParameters parameters) {
+		return apply(((PluginContext) context), graph, map, parameters);
+	}
+
+	// Main overload: builds the Dot model node-by-node and edge-by-edge.
+	public JComponent apply(PluginContext context,
+			DirectedGraph> graph,
+			ViewSpecificAttributeMap map, GraphVisualiserParameters parameters) {
+		Dot dot = new Dot();
+		Map nodeMap = new HashMap();
+		for (DirectedGraphNode node : graph.getNodes()) {
+			DotNode dotNode = dot.addNode(node.getLabel());
+			nodeMap.put(node, dotNode);
+			apply(node, dotNode, map, parameters);
+		}
+		for (DirectedGraphEdge edge : graph.getEdges()) {
+			DotEdge dotEdge = dot.addEdge(nodeMap.get(edge.getSource()), nodeMap.get(edge.getTarget()));
+			apply(edge, dotEdge, map, parameters);
+		}
+		// NavigableSVGPanel panel = new AnimatableSVGPanel(DotPanel.dot2svg(dot));
+		return new DotPanel(dot);
+	}
+
+	/*
+	 * Copy (as much as possible) the attributes from the JGraph node to the dot
+	 * node.
+	 */
+	private void apply(DirectedGraphNode node, DotNode dotNode, ViewSpecificAttributeMap map,
+			GraphVisualiserParameters parameters) {
+		AttributeMap attMap = node.getAttributeMap();
+		Shape shape = getShape(attMap, AttributeMap.SHAPE, null, node, map);
+		String style = "filled";
+		if (shape != null) {
+			// Map the ProM shape class onto the closest Graphviz node shape.
+			if (shape instanceof RoundedRect) {
+				dotNode.setOption("shape", "box");
+				style = style + ",rounded";
+			} else if (shape instanceof Rectangle) {
+				dotNode.setOption("shape", "box");
+			} else if (shape instanceof Ellipse) {
+				Boolean isSquare = getBoolean(attMap, AttributeMap.SQUAREBB, false, node, map);
+				dotNode.setOption("shape", isSquare ? "circle" : "ellipse");
+			} else if (shape instanceof Diamond) {
+				dotNode.setOption("shape", "diamond");
+			} else if (shape instanceof Hexagon) {
+				dotNode.setOption("shape", "hexagon");
+			} else if (shape instanceof Octagon) {
+				dotNode.setOption("shape", "octagon");
+			} else if (shape instanceof Polygon) {
+				// attMap.get(AttributeMap.POLYGON_POINTS);
+				dotNode.setOption("shape", "polygon");
+			}
+			dotNode.setOption("style", style);
+		}
+		Boolean showLabel = getBoolean(attMap, AttributeMap.SHOWLABEL, true, node, map);
+		// HV: Setting a tooltip seems to have no effect.
+		String label = getString(attMap, AttributeMap.LABEL, "", node, map);
+		String placeLabel = getString(attMap, GVPLACELABEL, "", node, map);
+		String tooltip = getString(attMap, AttributeMap.TOOLTIP, "", node, map);
+		String internalLabel = getFormattedString(parameters.getInternalLabelFormat(), label, placeLabel, tooltip);
+		String externalLabel = getFormattedString(parameters.getExternalLabelFormat(), label, placeLabel, tooltip);
+		tooltip = getFormattedString(parameters.getToolTipFormat(), label, placeLabel, tooltip);
+		if (showLabel) {
+			dotNode.setLabel(internalLabel);
+		} else {
+			dotNode.setLabel("");
+		}
+		dotNode.setOption("xlabel", externalLabel);
+		dotNode.setOption("tooltip", tooltip);
+
+		Float penWidth = getFloat(attMap, AttributeMap.LINEWIDTH, 1.0F, node, map);
+		dotNode.setOption("penwidth", "" + penWidth);
+		Color strokeColor = getColor(attMap, AttributeMap.STROKECOLOR, Color.BLACK, node, map);
+		dotNode.setOption("color", ColourMap.toHexString(strokeColor));
+		Color labelColor = getColor(attMap, AttributeMap.LABELCOLOR, Color.BLACK, node, map);
+		dotNode.setOption("fontcolor", ColourMap.toHexString(labelColor));
+		Color fillColor = getColor(attMap, AttributeMap.FILLCOLOR, Color.WHITE, node, map);
+		Color gradientColor = getColor(attMap, AttributeMap.GRADIENTCOLOR, fillColor, node, map);
+		if (gradientColor == null || gradientColor.equals(fillColor)) {
+			dotNode.setOption("fillcolor", ColourMap.toHexString(fillColor));
+		} else {
+			// Two colors separated by ":" yield a Graphviz gradient fill.
+			dotNode.setOption("fillcolor",
+					ColourMap.toHexString(fillColor) + ":" + ColourMap.toHexString(gradientColor));
+		}
+
+	}
+
+	/*
+	 * Copy (as much as possible) the attributes from the JGraph edge to the dot
+	 * edge.
+	 */
+	private void apply(DirectedGraphEdge edge, DotEdge dotEdge, ViewSpecificAttributeMap map,
+			GraphVisualiserParameters parameters) {
+		AttributeMap attMap = edge.getAttributeMap();
+		Boolean showLabel = getBoolean(attMap, AttributeMap.SHOWLABEL, false, edge, map);
+		String label = getString(attMap, AttributeMap.LABEL, "", edge, map);
+		dotEdge.setLabel(showLabel ? label : "");
+		// "dir=both" lets Graphviz render both arrowhead and arrowtail.
+		dotEdge.setOption("dir", "both");
+		ArrowType endArrowType = getArrowType(attMap, AttributeMap.EDGEEND, ArrowType.ARROWTYPE_CLASSIC, edge, map);
+		Boolean endIsFilled = getBoolean(attMap, AttributeMap.EDGEENDFILLED, false, edge, map);
+		switch (endArrowType) {
+			case ARROWTYPE_SIMPLE :
+			case ARROWTYPE_CLASSIC :
+				dotEdge.setOption("arrowhead", "open");
+				break;
+			case ARROWTYPE_TECHNICAL :
+				dotEdge.setOption("arrowhead", endIsFilled ? "normal" : "empty");
+				break;
+			case ARROWTYPE_CIRCLE :
+				dotEdge.setOption("arrowhead", endIsFilled ? "dot" : "odot");
+				break;
+			case ARROWTYPE_LINE :
+				dotEdge.setOption("arrowhead", "tee");
+				break;
+			case ARROWTYPE_DIAMOND :
+				dotEdge.setOption("arrowhead", endIsFilled ? "diamond" : "odiamond");
+				break;
+			case ARROWTYPE_NONE :
+				dotEdge.setOption("arrowhead", "none");
+				break;
+			default :
+				dotEdge.setOption("arrowhead", endIsFilled ? "box" : "obox");
+				break;
+		}
+		ArrowType startArrowType = getArrowType(attMap, AttributeMap.EDGESTART, ArrowType.ARROWTYPE_NONE, edge, map);
+		Boolean startIsFilled = getBoolean(attMap, AttributeMap.EDGESTARTFILLED, false, edge, map);
+		dotEdge.setOption("arrowtail", "none");
+		switch (startArrowType) {
+			case ARROWTYPE_SIMPLE :
+			case ARROWTYPE_CLASSIC :
+				dotEdge.setOption("arrowtail", "open");
+				break;
+			case ARROWTYPE_TECHNICAL :
+				dotEdge.setOption("arrowtail", startIsFilled ? "normal" : "empty");
+				break;
+			case ARROWTYPE_CIRCLE :
+				dotEdge.setOption("arrowtail", startIsFilled ? "dot" : "odot");
+				break;
+			case ARROWTYPE_LINE :
+				dotEdge.setOption("arrowtail", "tee");
+				break;
+			case ARROWTYPE_DIAMOND :
+				dotEdge.setOption("arrowtail", startIsFilled ? "diamond" : "odiamond");
+				break;
+			case ARROWTYPE_NONE :
+				dotEdge.setOption("arrowtail", "none");
+				break;
+			default :
+				dotEdge.setOption("arrowtail", startIsFilled ? "box" : "obox");
+				break;
+		}
+		Float penWidth = getFloat(attMap, AttributeMap.LINEWIDTH, 1.0F, edge, map);
+		dotEdge.setOption("penwidth", "" + penWidth);
+		Color edgeColor = getColor(attMap, AttributeMap.EDGECOLOR, Color.BLACK, edge, map);
+		dotEdge.setOption("color", ColourMap.toHexString(edgeColor));
+		Color labelColor = getColor(attMap, AttributeMap.LABELCOLOR, Color.BLACK, edge, map);
+		dotEdge.setOption("fontcolor", ColourMap.toHexString(labelColor));
+	}
+
+	/*
+	 * The following methods get the attribute value from the JGraph object with
+	 * a given default value. If the object has no such attribute, the default
+	 * value will be returned. The view-specific map takes precedence over the
+	 * object's own attribute map.
+	 */
+
+	private Boolean getBoolean(AttributeMap map, String key, Boolean value, AttributeMapOwner owner,
+			ViewSpecificAttributeMap m) {
+		Object obj = m.get(owner, key, null);
+		if (obj != null && obj instanceof Boolean) {
+			return (Boolean) obj;
+		}
+		obj = map.get(key);
+		if (obj != null && obj instanceof Boolean) {
+			return (Boolean) obj;
+		}
+		return value;
+	}
+
+	private Float getFloat(AttributeMap map, String key, Float value, AttributeMapOwner owner,
+			ViewSpecificAttributeMap m) {
+		Object obj = m.get(owner, key, null);
+		if (obj != null && obj instanceof Float) {
+			return (Float) obj;
+		}
+		obj = map.get(key);
+		if (obj != null && obj instanceof Float) {
+			return (Float) obj;
+		}
+		return value;
+	}
+
+	private String getString(AttributeMap map, String key, String value, AttributeMapOwner owner,
+			ViewSpecificAttributeMap m) {
+		Object obj = m.get(owner, key, null);
+		if (obj != null && obj instanceof String) {
+			/*
+			 * Some labels contain HTML mark-up. Remove as much as possible.
+			 */
+			// NOTE(review): the first replaceAll pattern literal is mangled in
+			// this patch (likely an HTML tag such as "<br>" stripped by the
+			// extraction) -- verify against the original source.
+			String s1 = ((String) obj).replaceAll("
", "\\\\n");
+			String s2 = s1.replaceAll("<[^>]*>", "");
+			return s2;
+		}
+		obj = map.get(key);
+		if (obj != null && obj instanceof String) {
+			/*
+			 * Some labels contain HTML mark-up. Remove as much as possible.
+			 */
+			// NOTE(review): same mangled pattern literal as above.
+			String s1 = ((String) obj).replaceAll("
", "\\\\n");
+			String s2 = s1.replaceAll("<[^>]*>", "");
+			return s2;
+		}
+		return value;
+	}
+
+	private Color getColor(AttributeMap map, String key, Color value, AttributeMapOwner owner,
+			ViewSpecificAttributeMap m) {
+		Object obj = m.get(owner, key, null);
+		if (obj != null && obj instanceof Color) {
+			return (Color) obj;
+		}
+		obj = map.get(key);
+		if (obj != null && obj instanceof Color) {
+			return (Color) obj;
+		}
+		return value;
+	}
+
+	private ArrowType getArrowType(AttributeMap map, String key, ArrowType value, AttributeMapOwner owner,
+			ViewSpecificAttributeMap m) {
+		Object obj = m.get(owner, key, null);
+		if (obj != null && obj instanceof ArrowType) {
+			return (ArrowType) obj;
+		}
+		obj = map.get(key);
+		if (obj != null && obj instanceof ArrowType) {
+			return (ArrowType) obj;
+		}
+		return value;
+	}
+
+	private Shape getShape(AttributeMap map, String key, Shape value, AttributeMapOwner owner,
+			ViewSpecificAttributeMap m) {
+		Object obj = m.get(owner, key, null);
+		if (obj != null && obj instanceof Shape) {
+			return (Shape) obj;
+		}
+		obj = map.get(key);
+		if (obj != null && obj instanceof Shape) {
+			return (Shape) obj;
+		}
+		return value;
+	}
+
+	// Expands the %l (label), %p (place label), %s (short place label, max 4
+	// chars) and %t (tooltip) placeholders in the given format string.
+	private String getFormattedString(String format, String label, String placeLabel, String tooltip) {
+		String shortPlaceLabel = (placeLabel.length() > 5 ? placeLabel.substring(0, 4) : placeLabel);
+		return format.replace("%l", label).replace("%p", placeLabel).replace("%s", shortPlaceLabel).replace("%t",
+				tooltip);
+	}
+}
diff --git a/src/org/processmining/slpnminer/models/MinedReachabilityGraph.java b/src/org/processmining/slpnminer/models/MinedReachabilityGraph.java
new file mode 100644
index 0000000..13fc138
--- /dev/null
+++ b/src/org/processmining/slpnminer/models/MinedReachabilityGraph.java
@@ -0,0 +1,123 @@
+package org.processmining.slpnminer.models;
+
+import javax.swing.JComponent;
+import javax.swing.SwingConstants;
+
+import org.processmining.contexts.uitopia.annotations.Visualizer;
+import org.processmining.framework.connections.ConnectionCannotBeObtained;
+import org.processmining.framework.connections.ConnectionManager;
+import org.processmining.framework.plugin.PluginContext;
+import org.processmining.framework.plugin.annotations.Plugin;
+import org.processmining.framework.plugin.annotations.PluginLevel;
+import org.processmining.models.connections.transitionsystem.TransitionSystemConnection;
+import org.processmining.models.graphbased.AttributeMap;
+import org.processmining.models.graphbased.directed.DirectedGraphElementWeights;
+
+import org.processmining.models.jgraph.ProMJGraphVisualizer;
+import org.processmining.models.jgraph.visualization.ProMJGraphPanel;
+import org.processmining.plugins.transitionsystem.miner.TSMinerInput;
+import org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet;
+import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph;
+import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet;
+import org.processmining.slpnminer.models.reachabilitygraph.State;
+import org.processmining.slpnminer.models.reachabilitygraph.Transition;
+
+// Visualizer plugin for a mined reachability graph: renders the graph with
+// JGraph, scaling edge/node line widths by connected weights and marking
+// start states (dashed) and accept states.
+public class MinedReachabilityGraph {
+
+	// Dash pattern used to highlight start states.
+	private static final float[] INITIAL_DASH_PATTERN = { 7f };
+
+	@Plugin(name = "@0 Show Mined Reachability Graph",
+			returnLabels = { "Visualization of Mined Reachability Graph" },
+			returnTypes = { JComponent.class },
+			parameterLabels = { "Reachability graph" }, userAccessible = true)
+	@Visualizer
+	public JComponent visualize(PluginContext context, ReachabilityGraph ts) {
+		/**
+		 * Will hold the weights, start states, and accept states.
+		 */
+		DirectedGraphElementWeights weights = new DirectedGraphElementWeights();
+		StartStateSet starts = new StartStateSet();
+		AcceptStateSet accepts = new AcceptStateSet();
+		TSMinerInput settings;
+		ProMJGraphPanel mainPanel;
+
+		settings = null;
+
+		/**
+		 * 1. Tries to get connected transition weights from the framework.
+		 */
+		ConnectionManager cm = context.getConnectionManager();
+		try {
+			// System.out.println("Checking for connection");
+			TransitionSystemConnection tsc = cm.getFirstConnection(TransitionSystemConnection.class, context, ts);
+			// System.out.println("Checked for connection: " + settings);
+			if (tsc.hasWeights()) {
+				weights = tsc.getObjectWithRole(TransitionSystemConnection.WEIGHTS);
+			}
+			starts = tsc.getObjectWithRole(TransitionSystemConnection.STARTIDS);
+			accepts = tsc.getObjectWithRole(TransitionSystemConnection.ACCEPTIDS);
+			if (tsc.hasSettings()) {
+				settings = (TSMinerInput) tsc.getObjectWithRole(TransitionSystemConnection.SETTINGS);
+			}
+
+		} catch (ConnectionCannotBeObtained e) {
+			/**
+			 * No connected transition weights found, no problem.
+			 */
+		}
+
+		/**
+		 * 2. Based on the connected objects found: updates visualization.
+		 */
+		if (!weights.isEmpty()) {
+			/**
+			 * Set the line widths according to the weights. To avoid getting
+			 * ridiculous line widths: linewidth=ln(weight).
+			 */
+			for (State state : ts.getNodes()) {
+				state.getAttributeMap().put(AttributeMap.LINEWIDTH,
+						new Float(1 + Math.log(Math.E) * Math.log(weights.get(state.getIdentifier(), 1))));
+			}
+			for (Transition trans : ts.getEdges()) {
+				trans.getAttributeMap().put(
+						AttributeMap.LINEWIDTH,
+						new Float(1
+								+ Math.log(Math.E)
+								* Math.log(weights.get(trans.getSource().getIdentifier(), trans.getTarget()
+										.getIdentifier(), trans.getIdentifier(), 1))));
+			}
+		}
+		if (!starts.isEmpty() || !accepts.isEmpty()) {
+			for (State state : ts.getNodes()) {
+
+				/**
+				 * Note that, in fact, the set of start states is the set of
+				 * start state ids.
+				 */
+				if (starts.contains(state.getIdentifier())) {
+					/**
+					 * This state is a start state.
+					 */
+					state.getAttributeMap().put(AttributeMap.DASHPATTERN, INITIAL_DASH_PATTERN);
+				}
+				if (accepts.contains(state.getIdentifier())) {
+					/**
+					 * This state is an accept state.
+					 */
+					state.setAccepting(true);
+				}
+			}
+		}
+
+		mainPanel = ProMJGraphVisualizer.instance().visualizeGraph(context, ts);
+		mainPanel.getGraph().setEditable(false);
+
+
+		if (settings != null) {
+			mainPanel.addViewInteractionPanel(new MinedRg(context, ts, settings, mainPanel),
+					SwingConstants.SOUTH);
+		}
+
+		return mainPanel;
+	}
+}
diff --git a/src/org/processmining/slpnminer/models/MinedRg.java b/src/org/processmining/slpnminer/models/MinedRg.java
new file mode 100644
index 0000000..5c92e50
--- /dev/null
+++ b/src/org/processmining/slpnminer/models/MinedRg.java
@@ -0,0 +1,148 @@
+package org.processmining.slpnminer.models;
+
+import info.clearthought.layout.TableLayout;
+
+import java.awt.Component;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.util.Collection;
+import java.util.HashSet;
+
+import javax.swing.BorderFactory;
+import javax.swing.JComponent;
+import javax.swing.JLabel;
+import javax.swing.JPanel;
+import javax.swing.SwingConstants;
+
+import org.deckfour.xes.model.XLog;
+import
org.processmining.contexts.uitopia.UIPluginContext; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.util.ui.scalableview.ScalableComponent; +import org.processmining.framework.util.ui.scalableview.ScalableViewPanel; +import org.processmining.framework.util.ui.scalableview.interaction.ViewInteractionPanel; +import org.processmining.models.graphbased.directed.DirectedGraphEdge; +import org.processmining.models.graphbased.directed.DirectedGraphNode; +import org.processmining.models.jgraph.visualization.ProMJGraphPanel; +import org.processmining.plugins.transitionsystem.miner.TSMiner; +import org.processmining.plugins.transitionsystem.miner.TSMinerInput; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; + +import com.fluxicon.slickerbox.components.NiceIntegerSlider; +import com.fluxicon.slickerbox.components.SlickerButton; + +public class MinedRg extends JPanel implements ActionListener, ViewInteractionPanel { + + /** + * + */ + private static final long serialVersionUID = -3710929196431016518L; + + private TSMinerInput settings; + private PluginContext context; + private ProMJGraphPanel parentPanel; + private NiceIntegerSlider slider; + private SlickerButton filterButton; + private ReachabilityGraph reachabilityGraph; + private JPanel componentPanel; + private JLabel label; + private int maxValue; + + public MinedRg(PluginContext context, ReachabilityGraph ts, TSMinerInput settings, ProMJGraphPanel panel) { + + this.context = context; + reachabilityGraph = ts; + this.settings = settings; + parentPanel = panel; + + double size[][] = { { 400, 100 }, { 25, 10, 25 } }; + componentPanel = new JPanel(); + componentPanel.setLayout(new TableLayout(size)); + componentPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5)); + componentPanel.setOpaque(true); + + maxValue = 1; + slider = new NiceIntegerSlider("Coverage", 0, Math.max(maxValue, 1), 1); + componentPanel.add(slider, "0, 0, 1, 0"); + 
slider.setVisible(false); + + filterButton = new SlickerButton("Filter log"); + filterButton.setFont(filterButton.getFont().deriveFont(11f)); + filterButton.setBorder(BorderFactory.createEmptyBorder(2, 10, 2, 12)); + filterButton.setOpaque(false); + filterButton.setAlignmentX(Component.LEFT_ALIGNMENT); + filterButton.setHorizontalAlignment(SwingConstants.LEFT); + filterButton.addActionListener(this); + filterButton.setToolTipText("Create a log containing those traces that reach the threshold."); + componentPanel.add(filterButton, "1, 2"); + filterButton.setVisible(false); + + label = new JLabel("Please, first select some elements."); + componentPanel.add(label, "0, 0, 1, 2"); + label.setVisible(true); + } + + @Override + public JComponent getComponent() { + // TODO Auto-generated method stub + return componentPanel; + } + + @Override + public double getHeightInView() { + // TODO Auto-generated method stub + return 60; + } + + @Override + public String getPanelName() { + // TODO Auto-generated method stub + return "Filter log"; + } + + @Override + public double getWidthInView() { + // TODO Auto-generated method stub + return 500; + } + + @Override + public void setParent(ScalableViewPanel viewPanel) { + // TODO Auto-generated method stub + } + + @Override + public void setScalableComponent(ScalableComponent scalable) { + // TODO Auto-generated method stub + + } + + @Override + public void willChangeVisibility(boolean to) { + // TODO Auto-generated method stub + maxValue = parentPanel.getSelectedElements().size(); + /* + * NiceIntegerSlider does not provide a SetMaximum. + * As a result, we need to replace the NiceIntegerSlider with a brand new one. + */ + componentPanel.remove(slider); + slider = new NiceIntegerSlider("Threshold for number of selected elements", 0, Math.max(maxValue, 1), 1); + componentPanel.add(slider, "0, 0, 1, 0"); + /* + * Set the proper visibilities. 
+ */ + slider.setVisible(maxValue > 0); + filterButton.setVisible(maxValue > 0); + label.setVisible(maxValue <= 0); + } + + @Override + public void updated() { + // TODO Auto-generated method stub + + } + + @Override + public void actionPerformed(ActionEvent e) { + } + +} diff --git a/src/org/processmining/slpnminer/models/StochasticNet.java b/src/org/processmining/slpnminer/models/StochasticNet.java new file mode 100644 index 0000000..860d660 --- /dev/null +++ b/src/org/processmining/slpnminer/models/StochasticNet.java @@ -0,0 +1,196 @@ +package org.processmining.slpnminer.models; + +import java.util.Collection; + +import org.apache.commons.math3.distribution.NormalDistribution; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.ResetNet; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.DistributionType; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.ExecutionPolicy; +import org.processmining.models.graphbased.directed.petrinet.StochasticNet.TimeUnit; +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; + +public interface StochasticNet extends ResetNet, Petrinet { + + public static final String PARAMETER_LABEL = "Stochastic Petri Net"; + + /** + * Supported parametric and non-parametric distributions + */ + public enum DistributionType { + // parametric continuous distributions + BETA, EXPONENTIAL, NORMAL, LOGNORMAL, GAMMA, STUDENT_T, UNIFORM, WEIBULL, + // nonparametric continuous distributions + GAUSSIAN_KERNEL, HISTOGRAM, LOGSPLINE, BERNSTEIN_EXPOLYNOMIAL, + // immediate transitions + IMMEDIATE, + // a deterministic transition (e.g. 
takes always exactly 5 time units) + DETERMINISTIC, + // time series distribution + SINUSOIDAL_SERIES, ARMA_SERIES, + // undefined + UNDEFINED; + + public static DistributionType fromString(String text) { + if (text == null) { + return UNDEFINED; + } + for (DistributionType dType : DistributionType.values()) { + if (text.equalsIgnoreCase(dType.toString())) { + return dType; + } + } + return UNDEFINED; + } + } + + /** + * Execution policy of the network. + * + * @see paper: + * Ajmone Marsan, M., et al. "The effect of execution policies on the semantics and analysis of stochastic Petri nets." Software Engineering, IEEE Transactions on 15.7 (1989): 832-846. + */ + public enum ExecutionPolicy { + GLOBAL_PRESELECTION("global preselection"), RACE_RESAMPLING("race (resampling)"), + RACE_ENABLING_MEMORY("race (enabling memory)"), RACE_AGE_MEMORY("race (age memory)"); + + private String name; + + public String shortName() { + switch (this) { + case GLOBAL_PRESELECTION: + return "preSel"; + case RACE_AGE_MEMORY: + return "raceAge"; + case RACE_ENABLING_MEMORY: + return "raceEnabl"; + case RACE_RESAMPLING: + return "raceResampl"; + } + return null; + } + + ExecutionPolicy(String name) { + this.name = name; + } + + public String toString() { + return name; + } + + public static ExecutionPolicy fromString(String value) { + for (ExecutionPolicy ep : ExecutionPolicy.values()) { + if (ep.toString().equalsIgnoreCase(value)) { + return ep; + } + } + return RACE_ENABLING_MEMORY; + } + } + + /** + * Enumeration specifying in which time unit the parameters of the net are given. + *

+ * For example, if a timed transition in a {@link StochasticNet} has a duration distribution of + * {@link NormalDistribution}(10,2), this tells us, if it takes usually 10 seconds, or 10 hours to complete. + */ + public enum TimeUnit { + NANOSECONDS("nanoseconds"), MICROSECONDS("microseconds"), MILLISECONDS("milliseconds"), + SECONDS("seconds"), MINUTES("minutes"), HOURS("hours"), + DAYS("days"), WEEKS("weeks"), YEARS("years"), UNSPECIFIED("unspecified"); + + private String stringValue; + + TimeUnit(String string) { + this.stringValue = string; + } + + public String toString() { + return stringValue; + } + + public static TimeUnit fromString(String s) { + for (TimeUnit tu : TimeUnit.values()) { + if (tu.toString().equalsIgnoreCase(s)) { + return tu; + } + } + return UNSPECIFIED; + } + + /** + * Returns the conversion factor + * + * @return + */ + public double getUnitFactorToMillis() { + switch (this) { + case NANOSECONDS: + return 1. / 1000000; // 1 / (1000 * 1000) ms is a nanosecond + case MICROSECONDS: + return 1. / 1000; // 1/1000 ms is a microsecond + case MILLISECONDS: + return 1; // nothing to convert + case SECONDS: + return 1000; // 1000 ms is a second + case MINUTES: + return 60000; // 1000 * 60 ms is a minute + case HOURS: + return 3600000; // 1000 * 60 * 60 ms is an hour + case DAYS: + return 86400000; // 1000 * 60 * 60 * 24 ms is a day + case WEEKS: + return 604800000; // 1000 * 60 * 60 * 24 * 7 ms is a week + case YEARS: + return 31536000000.; // 1000 * 60 * 60 * 24 * 7 * 365 ms is a day + default: + return 1; // unspecified case = milliseconds + } + } + } + + /** + * Returns the execution policy (see {@link ExecutionPolicy}) of the net. + * + * @return {@link ExecutionPolicy} + */ + public ExecutionPolicy getExecutionPolicy(); + + /** + * Sets the execution policy of the net. 
	 *
	 * @param policy {@link ExecutionPolicy}
	 */
	public void setExecutionPolicy(ExecutionPolicy policy);

	/**
	 * The time unit used in the stochastic net
	 *
	 * @return {@link TimeUnit}
	 */
	public TimeUnit getTimeUnit();

	/**
	 * Sets the time unit of the net
	 *
	 * @param timeUnit {@link TimeUnit}
	 */
	public void setTimeUnit(TimeUnit timeUnit);

	/**
	 * All timed (i.e., non-immediate) transitions of this net.
	 * NOTE(review): raw Collection — the element type (presumably TimedTransition)
	 * was stripped when this patch was mangled; restore from version control.
	 */
	public Collection getTimedTransitions();

	// immediate transitions

	/** Adds an immediate transition with default weight (1, per the implementation). */
	public TimedTransition addImmediateTransition(String label);

	/** Adds an immediate transition with the given firing weight. */
	public TimedTransition addImmediateTransition(String label, double weight);

	/**
	 * Adds an immediate transition with the given firing weight, priority and
	 * (possibly null) training data string.
	 */
	public TimedTransition addImmediateTransition(String label, double weight, int priority, String trainingData);

	// timed transitions

	/** Adds a timed transition with the given duration-distribution type and parameters. */
	public TimedTransition addTimedTransition(String label, DistributionType type, double... distributionParameters);

	/** Adds a timed transition with an explicit firing weight. */
	public TimedTransition addTimedTransition(String label, double weight, DistributionType type, double... distributionParameters);

	/** Adds a timed transition with explicit weight and training data. */
	public TimedTransition addTimedTransition(String label, double weight, DistributionType type, String trainingData, double...
			distributionParameters);
}
\ No newline at end of file
diff --git a/src/org/processmining/slpnminer/models/StochasticNetImpl.java b/src/org/processmining/slpnminer/models/StochasticNetImpl.java new file mode 100644 index 0000000..77efb3a --- /dev/null +++ b/src/org/processmining/slpnminer/models/StochasticNetImpl.java @@ -0,0 +1,197 @@
package org.processmining.slpnminer.models;

import org.processmining.models.graphbased.AttributeMap;
import org.processmining.models.graphbased.directed.DirectedGraphElement;
import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge;
import org.processmining.models.graphbased.directed.petrinet.PetrinetNode;
import org.processmining.models.graphbased.directed.petrinet.StochasticNet;
import org.processmining.models.graphbased.directed.petrinet.elements.*;

import javax.swing.*;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Petri net implementation carrying stochastic annotations (timed/immediate
 * transitions with weights and duration distributions).
 *
 * NOTE(review): generic type parameters on the collection fields below
 * (e.g. {@code Set<PetrinetNode>}, {@code Set<PetrinetEdge<...>>}) were stripped
 * when this patch was mangled; restore them from version control.
 */
public class StochasticNetImpl extends AbstractResetInhibitorNet implements StochasticNet {

	// maintain a set of all nodes for quicker access:
	private Set nodes;
	private Set> edges;
	// all timed (non-immediate) transitions, in insertion order
	private Collection timedTransLst= new ArrayList();

	private ExecutionPolicy executionPolicy;
	private TimeUnit timeUnit;

	/**
	 * Creates an empty stochastic net with the given label
	 * (reset arcs enabled, inhibitor arcs disabled — per the super(true, false) call).
	 */
	public StochasticNetImpl(String label) {
		super(true, false);
		getAttributeMap().put(AttributeMap.PREF_ORIENTATION, SwingConstants.WEST);
		getAttributeMap().put(AttributeMap.LABEL, label);
		nodes = new HashSet();
		edges = new HashSet>();
	}

	// fresh net of the same concrete type, used by the cloning machinery
	protected StochasticNetImpl getEmptyClone() {
		return new StochasticNetImpl(getLabel());
	}

	/** Adds an immediate transition with default weight 1. */
	public TimedTransition addImmediateTransition(String label) {
		return addImmediateTransition(label, 1);
	}

	/** Adds an immediate transition with the given weight and default priority 1. */
	public TimedTransition addImmediateTransition(String label, double weight) {
		return addImmediateTransition(label, weight, 1, null);
	}

	/**
	 * Adds an immediate (zero-duration) transition.
	 *
	 * @param trainingData may be null
	 */
	public TimedTransition addImmediateTransition(String label,
			double weight, int priority, String trainingData) {
		TimedTransition t = new TimedTransition(label, this, null, weight, priority, DistributionType.IMMEDIATE, null);
		t.setTrainingData(trainingData);
		transitions.add(t);
		nodes.add(t); // also added by graphElementAdded below; kept as in original
		graphElementAdded(t);
		return t;
	}

	/** Adds a timed transition with default weight 1. */
	public TimedTransition addTimedTransition(String label, DistributionType type, double... distributionParameters) {
		return addTimedTransition(label, 1, type, distributionParameters);
	}

	/** Adds a timed transition with empty training data. */
	public TimedTransition addTimedTransition(String label, double weight, DistributionType type,
			double... distributionParameters) {
		return addTimedTransition(label, weight, type, "", distributionParameters);
	}

	/**
	 * Adds a timed transition (priority fixed to 0) and registers it in
	 * {@link #getTimedTransitions()}.
	 */
	public TimedTransition addTimedTransition(String label, double weight, DistributionType type,
			String trainingData, double... distributionParameters) {
		TimedTransition t = new TimedTransition(label, this, null, weight, 0, type, distributionParameters);
		transitions.add(t);
		nodes.add(t);
		timedTransLst.add(t);
		graphElementAdded(t);
		// NOTE(review): training data is set after graphElementAdded here but before
		// it in addImmediateTransition — confirm listeners do not depend on the order.
		t.setTrainingData(trainingData);
		return t;
	}

	/** Live view of the timed transitions added via addTimedTransition. */
	public Collection getTimedTransitions(){
		return timedTransLst;
	}

	/**
	 * Replaces {@link Transition}s by {@link TimedTransition}s
	 * (copied transitions get DistributionType.UNDEFINED); also copies the
	 * attribute map of the source net wholesale.
	 */
	@Override
	protected synchronized Map cloneFrom(AbstractResetInhibitorNet net,
			boolean transitions, boolean places, boolean arcs, boolean resets, boolean inhibitors) {
		Map mapping = new HashMap();

		if (transitions) {
			for (Transition t : net.transitions) {
				TimedTransition copy = addTimedTransition(t.getLabel(), DistributionType.UNDEFINED);
				copy.setInvisible(t.isInvisible());
				mapping.put(t, copy);
			}
		}
		if (places) {
			for (Place p : net.places) {
				Place copy = addPlace(p.getLabel());
				mapping.put(p, copy);
			}
		}
		if (arcs) {
			// arcs are mapped through the node mapping built above
			for (Arc a : net.arcs) {
				mapping.put(a, addArcPrivate((PetrinetNode) mapping.get(a.getSource()), (PetrinetNode) mapping.get(a
						.getTarget()), a.getWeight(), a.getParent()));
			}
		}
		if (inhibitors) {
			for (InhibitorArc a : net.inhibitorArcs) {
				mapping.put(a, addInhibitorArc((Place) mapping.get(a.getSource()), (Transition) mapping.get(a
						.getTarget()), a.getLabel()));
			}
		}
		if (resets) {
			for (ResetArc a : net.resetArcs) {
				mapping.put(a, addResetArc((Place) mapping.get(a.getSource()), (Transition) mapping.get(a.getTarget()),
						a.getLabel()));
			}
		}
		// replace this net's attributes with those of the source net
		getAttributeMap().clear();
		AttributeMap map = net.getAttributeMap();
		for (String key : map.keySet()) {
			getAttributeMap().put(key, map.get(key));
		}

		return mapping;
	}

	// NOTE(review): commented-out addPlace/addTransition/removeTransition/removePlace
	// overrides removed for clarity — node bookkeeping is handled by
	// graphElementAdded/graphElementRemoved below.

	/** Keeps the local node/edge caches in sync when an element is added. */
	@Override
	public void graphElementAdded(Object element) {
		if (element instanceof PetrinetNode) {
			PetrinetNode node = (PetrinetNode) element;
			nodes.add(node);
		}
		if (element instanceof PetrinetEdge) {
			edges.add((PetrinetEdge) element);
		}
		super.graphElementAdded(element);
	}

	/** Keeps the local node/edge caches in sync when an element is removed. */
	public void graphElementRemoved(Object element) {
		if (element instanceof PetrinetNode) {
			PetrinetNode node = (PetrinetNode) element;
			nodes.remove(node);
		}
		if (element instanceof PetrinetEdge) {
			edges.remove(element);
		}
		super.graphElementRemoved(element);
	}

	// returns the internal set directly (callers share the live collection)
	public synchronized Set getNodes() {
		return nodes;
	}

	// returns the internal set directly (callers share the live collection)
	public synchronized Set> getEdges() {
		return edges;
	}

	public ExecutionPolicy getExecutionPolicy() {
return executionPolicy; + } + + public void setExecutionPolicy(ExecutionPolicy executionPolicy) { + this.executionPolicy = executionPolicy; + } + + public TimeUnit getTimeUnit() { + return timeUnit; + } + + public void setTimeUnit(TimeUnit timeUnit) { + this.timeUnit = timeUnit; + } +} diff --git a/src/org/processmining/slpnminer/models/StochasticPetrinetSemantics.java b/src/org/processmining/slpnminer/models/StochasticPetrinetSemantics.java new file mode 100644 index 0000000..11b8967 --- /dev/null +++ b/src/org/processmining/slpnminer/models/StochasticPetrinetSemantics.java @@ -0,0 +1,10 @@ +package org.processmining.slpnminer.models; + + +import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.Marking; + +public interface StochasticPetrinetSemantics extends Semantics { + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/StochasticPetrinetSemanticsFactory.java b/src/org/processmining/slpnminer/models/StochasticPetrinetSemanticsFactory.java new file mode 100644 index 0000000..5407624 --- /dev/null +++ b/src/org/processmining/slpnminer/models/StochasticPetrinetSemanticsFactory.java @@ -0,0 +1,14 @@ +package org.processmining.slpnminer.models; + +import org.processmining.models.graphbased.directed.petrinet.StochasticNet; + +public class StochasticPetrinetSemanticsFactory{ + + StochasticPetrinetSemanticsFactory(){ + } + + public static StochasticPetrinetSemantics stochasticNetSemantics(Class net) { + return new StochasticPetrinetSemanticsImpl(); + } + +} diff --git a/src/org/processmining/slpnminer/models/StochasticPetrinetSemanticsImpl.java b/src/org/processmining/slpnminer/models/StochasticPetrinetSemanticsImpl.java new file mode 100644 index 0000000..f77d32a --- /dev/null +++ b/src/org/processmining/slpnminer/models/StochasticPetrinetSemanticsImpl.java @@ -0,0 +1,41 @@ +package 
org.processmining.slpnminer.models;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

import org.processmining.framework.providedobjects.SubstitutionType;
import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge;
import org.processmining.models.graphbased.directed.petrinet.PetrinetNode;
import org.processmining.models.graphbased.directed.petrinet.elements.Arc;
import org.processmining.models.graphbased.directed.petrinet.elements.InhibitorArc;
import org.processmining.models.graphbased.directed.petrinet.elements.Place;
import org.processmining.models.graphbased.directed.petrinet.elements.ResetArc;
import org.processmining.models.graphbased.directed.petrinet.elements.TimedTransition;
import org.processmining.models.semantics.ExecutionInformation;
import org.processmining.models.semantics.IllegalTransitionException;
import org.processmining.models.semantics.petrinet.Marking;
import org.processmining.models.semantics.petrinet.PetrinetExecutionInformation;
import org.processmining.models.semantics.petrinet.PetrinetSemantics;

import entropic.AbstractStochasticNetSemantics;

/**
 * Default firing semantics for stochastic Petri nets, delegating most behavior
 * to {@link AbstractStochasticNetSemantics}.
 */
@SubstitutionType(substitutedType = StochasticPetrinetSemantics.class)
public class StochasticPetrinetSemanticsImpl extends AbstractStochasticNetSemantics implements StochasticPetrinetSemantics {
	/**
	 *
	 */
	private static final long serialVersionUID = 1863753685175892937L;

	public StochasticPetrinetSemanticsImpl() {
	}

	/**
	 * Fires the given transition in the current marking ('state' is inherited
	 * from the superclass) and reports which tokens were required/removed/produced.
	 */
	public ExecutionInformation executeTransition(TimedTransition toExecute) {
		Marking required = getRequired(toExecute);
		// NOTE(review): 'minus' presumably returns a new Marking without mutating
		// 'state'; if so, the required tokens are never actually removed from
		// 'state' before the produced ones are added below — verify against the
		// Marking API / AbstractStochasticNetSemantics whether consumption happens
		// elsewhere, otherwise input places keep their tokens after firing.
		Marking removed = state.minus(required);
		Marking produced = getProduced(toExecute);
		state.addAll(produced);

		return new PetrinetExecutionInformation(required, removed, produced, toExecute);
	}
}
diff --git a/src/org/processmining/slpnminer/models/StochasticReachabilityGraph.java b/src/org/processmining/slpnminer/models/StochasticReachabilityGraph.java new file mode
100644 index 0000000..9052766 --- /dev/null +++ b/src/org/processmining/slpnminer/models/StochasticReachabilityGraph.java @@ -0,0 +1,268 @@ +//package org.processmining.slpnminer.models; +// +//import org.apache.commons.collections15.MapIterator; +//import org.apache.commons.collections15.keyvalue.MultiKey; +//import org.apache.commons.collections15.map.MultiKeyMap; +//import org.processmining.models.graphbased.directed.*; +//import org.processmining.slpnminer.models.reachabilitygraph.State; +//import org.processmining.slpnminer.models.reachabilitygraph.Transition; +// +//import javax.swing.*; +//import java.util.*; +// +// +//public class StochasticReachabilityGraph extends AbstractDirectedGraph implements TransitionSystem { +// +// private final Map states = new LinkedHashMap(); +// +// @SuppressWarnings("rawtypes") +// private final MultiKeyMap t = new MultiKeyMap(); //3 keys: and a value , but is set to generics +// +// private Map proxyMap; +// +// public StochasticReachabilityGraph(String label) { +// super(); +// getAttributeMap().put(AttributeMap.LABEL, label); +// getAttributeMap().put(AttributeMap.PREF_ORIENTATION, SwingConstants.NORTH); +// +// proxyMap = new HashMap(); +// } +// +// @Override +// protected synchronized Map cloneFrom( +// DirectedGraph graph) { +// assert (graph instanceof StochasticReachabilityGraph); +// Map mapping = new HashMap(); +// +// StochasticReachabilityGraph ts = (StochasticReachabilityGraph) graph; +// for (Object identifier : ts.states.keySet()) { +// addState(identifier); +// mapping.put(ts.states.get(identifier), getNode(identifier)); +// } +//// for (Transition trans : getEdges()) { +// for (Transition trans : ts.getEdges()) { +// addTransition(trans.getSource().getIdentifier(), trans.getTarget().getIdentifier(), trans.getIdentifier(), +// trans.getVisibility()); +// mapping.put(trans, findTransition(trans.getSource().getIdentifier(), trans.getTarget().getIdentifier(), +// trans.getIdentifier())); +// } +// return 
mapping; +// } +// +// @Override +// protected synchronized AbstractDirectedGraph getEmptyClone() { +// return new StochasticReachabilityGraph(getLabel()); +// } +// +// /** +// * Node/State Handling +// * +// */ +// +// public synchronized boolean addState(Object identifier) { +// if (!states.containsKey(identifier)) { +// State state = new State(identifier, this); +// states.put(identifier, state); +// graphElementAdded(state); +// return true; +// } else { +// return false; +// } +// } +// +// public synchronized boolean addState(Object identifier, Boolean isInitial,Boolean isAccept) { +// if (!states.containsKey(identifier)) { +// State state = new State(identifier, this); +// state.setInitiating(isInitial); +// state.setAccepting(isAccept); +// states.put(identifier, state); +// graphElementAdded(state); +// return true; +// } else { +// return false; +// } +// } +// +// public synchronized void removeNode(DirectedGraphNode node) { +// if (node instanceof State) { +// removeState(node); +// } else { +// assert (false); +// } +// } +// +// public synchronized Set getNodes() { +// Set nodes = new HashSet(); +// nodes.addAll(states.values()); +// return nodes; +// } +// +// public synchronized Collection getStates() { +// return states.keySet(); +// } +// +// +// public synchronized Object removeState(Object state) { +// if (state instanceof State) { +// Object removed = states.get(((State) state).getIdentifier()); +// states.remove(((State) state).getIdentifier()); +// graphElementRemoved(removed); +// return removed; +// } +// else return null; +// } +// +// public synchronized State getNode(Object identifier) { +// return states.get(getProxy(identifier)); +// } +// +// public synchronized Boolean containsNode(Object identifier) { +// return states.containsKey(getProxy(identifier)); +// } +// +// /** +// * Edge/Transition Handling +// * +// */ +// +// @SuppressWarnings("unchecked") +// public synchronized boolean addTransition(Object fromState, Object toState, 
Object identifier, String label, Boolean isInvisible) { +// State source = getNode(fromState); +// State target = getNode(toState); +// checkAddEdge(source, target); +// +// if (!t.containsKey(source, target, identifier)) { +// Transition transition = new Transition(source, target, identifier, label, isInvisible); +// t.put(source, target, identifier, transition); //Stores the Source State, Target State and Object identifier as keys of a Transition. +// graphElementAdded(transition); +// target.addPreTrans(transition); // add this previous transition to target state. +// return true; +// } else { +// return false; +// } +// } +// +// public synchronized Transition addAndReturnTransition(Object fromState, Object toState, Object identifier, String label, Boolean isInvisible) { +// State source = getNode(fromState); +// State target = getNode(toState); +// checkAddEdge(source, target); +// +// if (!t.containsKey(source, target, identifier)) { +// Transition transition = new Transition(source, target, identifier, label, isInvisible); +// t.put(source, target, identifier, transition); //Stores the Source State, Target State and Object identifier as keys of a Transition. +// graphElementAdded(transition); +// target.addPreTrans(transition); // add this previous transition to target state. 
+// return transition; +// } else { +// return null; +// } +// } +// +// @SuppressWarnings("unchecked") +// public synchronized Object removeTransition(Object fromState, Object toState, Object identifier) { +// Object removed = t.get(getNode(fromState), getNode(toState), identifier); +// t.remove(getNode(fromState), getNode(toState), identifier); +// graphElementRemoved(removed); +// return removed; +// } +// +// @SuppressWarnings("unchecked") +// @Override +// public synchronized void removeEdge(@SuppressWarnings("rawtypes") DirectedGraphEdge edge) { +// assert (edge instanceof Transition); +// Transition transition = (Transition) edge; +// t.remove(transition.getSource(),transition.getTarget(),transition.getIdentifier()); +// } +// +// public synchronized Collection getTransitions() { +// Set keys = new LinkedHashSet(); +// for(Object trans : t.values()) +// keys.add(((Transition) trans).getIdentifier()); +// return keys; +// } +// +// @SuppressWarnings("unchecked") +// public synchronized Collection getTransitionObjects() { +// return t.values(); +// } +// +// @SuppressWarnings("unchecked") +// public synchronized Set getEdges() { +// Set result = new HashSet(); +// result.addAll(t.values()); +// return Collections.unmodifiableSet(result); +// } +// +// @SuppressWarnings("rawtypes") +// public Collection getEdges(Object identifier) { +// Set collection = new LinkedHashSet(); +// MapIterator iterator = t.mapIterator(); +// while(iterator.hasNext()) +// { +// iterator.next(); +// MultiKey key = (MultiKey) iterator.getKey(); +// if(identifier.equals(key.getKey(2))){ +// collection.add((Transition) t.get(key)); +// } +// } +// return collection; +// } +// +// @SuppressWarnings("unchecked") +// public synchronized Transition findTransition(Object fromState, Object toState, Object identifier) { +// return (Transition) t.get(getNode(fromState),getNode(toState),identifier); +// } +// +// +// public void putProxy(Object obj, Object proxy) { +// proxyMap.put(obj, proxy); 
+// } +// +// private Object getProxy(Object obj) { +// while (proxyMap.containsKey(obj)) { +// obj = proxyMap.get(obj); +// } +// return obj; +// } +// +// public void addProxyMap(StochasticReachabilityGraph ts) { +// for (Object key: ts.proxyMap.keySet()) { +// proxyMap.put(key, ts.proxyMap.get(key)); +// } +// } +// +// public HashSet getReachableState(){ +// +// +// HashSet reachableStateSet = new HashSet(); +// +// for (State state : getNodes()) { +// if(state.isAccepting()) { +// reachableStateSet.add(state); +// reachableStateSet = getPrecedenceState(state, reachableStateSet); +// } +// } +// return reachableStateSet; +// } +// +// public HashSet getPrecedenceState(State state, HashSet reachableStateSet){ +// for (Transition t: state.getPreTransSet()) { +// if(reachableStateSet.contains(t.getSource())) { +// continue; +// } +// reachableStateSet.add(t.getSource()); +// +// if(!t.getSource().isInitiating()) { +// reachableStateSet = getPrecedenceState(t.getSource(), reachableStateSet); +// } +// } +// return reachableStateSet; +// } +// +// @Override +// public boolean addTransition(Object fromState, Object toState, Object identifier, Boolean isInvisible) { +// // TODO Auto-generated method stub +// return false; +// } +// +//} diff --git a/src/org/processmining/slpnminer/models/TransitionSystem.java b/src/org/processmining/slpnminer/models/TransitionSystem.java new file mode 100644 index 0000000..67cdefb --- /dev/null +++ b/src/org/processmining/slpnminer/models/TransitionSystem.java @@ -0,0 +1,32 @@ +package org.processmining.slpnminer.models; + +import java.util.Collection; +import org.processmining.models.graphbased.directed.DirectedGraph; +import org.processmining.slpnminer.models.reachabilitygraph.State; +import org.processmining.slpnminer.models.reachabilitygraph.Transition; + +public interface TransitionSystem extends DirectedGraph { + + String getLabel(); + + // transitions + boolean addTransition(Object fromState, Object toState, Object identifier, 
			Boolean isInvisible);

	/** Removes the transition identified by (fromState, toState, identifier); returns the removed edge object. */
	Object removeTransition(Object fromState, Object toState, Object identifier);

	/**
	 * Identifiers of all transitions.
	 * NOTE(review): raw Collection — element types were stripped when this patch
	 * was mangled; restore from version control.
	 */
	Collection getTransitions();

	/** All edges labelled with the given transition identifier. */
	Collection getEdges(Object identifier);

	// states

	/** Adds a state for the identifier; returns false if it already exists. */
	boolean addState(Object identifier);

	/** Removes the state for the identifier; returns the removed state object. */
	Object removeState(Object identifier);

	/** Identifiers of all states. */
	Collection getStates();

	/** The state object registered for the identifier (resolving proxies, per the implementation). */
	State getNode(Object identifier);

	/** The transition stored under the (fromState, toState, identifier) key triple. */
	Transition findTransition(Object fromState, Object toState, Object identifier);

}
diff --git a/src/org/processmining/slpnminer/models/TransitionToDot.java b/src/org/processmining/slpnminer/models/TransitionToDot.java new file mode 100644 index 0000000..ac1539e --- /dev/null +++ b/src/org/processmining/slpnminer/models/TransitionToDot.java @@ -0,0 +1,4 @@
package org.processmining.slpnminer.models;

// NOTE(review): empty placeholder class — presumably intended to render
// transitions to Graphviz dot; confirm whether it should be implemented or removed.
public class TransitionToDot {
}
diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/AcceptStateSet.java b/src/org/processmining/slpnminer/models/reachabilitygraph/AcceptStateSet.java new file mode 100644 index 0000000..1195bc5 --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/AcceptStateSet.java @@ -0,0 +1,12 @@
package org.processmining.slpnminer.models.reachabilitygraph;

import java.util.LinkedHashSet;

/**
 * Insertion-ordered set of accepting states.
 * NOTE(review): raw LinkedHashSet — the element type parameter was stripped
 * when this patch was mangled; restore from version control.
 */
public class AcceptStateSet extends LinkedHashSet {

	/**
	 *
	 */
	private static final long serialVersionUID = -6359129086727314616L;

}
diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/CoverabilityGraph.java b/src/org/processmining/slpnminer/models/reachabilitygraph/CoverabilityGraph.java new file mode 100644 index 0000000..3d4b258 --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/CoverabilityGraph.java @@ -0,0 +1,10 @@
package org.processmining.slpnminer.models.reachabilitygraph;

/** A coverability graph; structurally identical to a {@link ReachabilityGraph}. */
public class CoverabilityGraph extends ReachabilityGraph {

	public CoverabilityGraph(String label) {
		super(label);
	}

}
diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/CrossProduct.java
b/src/org/processmining/slpnminer/models/reachabilitygraph/CrossProduct.java new file mode 100644 index 0000000..2b88c27 --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/CrossProduct.java @@ -0,0 +1,14 @@
package org.processmining.slpnminer.models.reachabilitygraph;


/**
 * Computes the synchronous (cross) product of two reachability graphs
 * (in practice: a trace automaton and a model's reachability graph).
 */
public interface CrossProduct {

	/**
	 * Builds the cross product of (r1, init1, fin1) and (r2, init2, fin2).
	 * Per the implementation, returns a 2-element array [cross product graph,
	 * mapping from product-state label to model-state label], or null when no
	 * product exists.
	 */
	Object[] getCrossProduct(ReachabilityGraph r1,
			State init1,
			AcceptStateSet fin1,
			ReachabilityGraph r2,
			State init2,
			AcceptStateSet fin2);

	String getProbability(ReachabilityGraph r);
}
diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/CrossProductImpl.java b/src/org/processmining/slpnminer/models/reachabilitygraph/CrossProductImpl.java new file mode 100644 index 0000000..aff21a8 --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/CrossProductImpl.java @@ -0,0 +1,273 @@
package org.processmining.slpnminer.models.reachabilitygraph;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;


/**
 * Cross product of a trace automaton and a model reachability graph.
 *
 * NOTE(review): this class keeps its traversal state in mutable public fields
 * shared across mutually recursive methods, so it is NOT reusable across calls
 * or threads — one instance per getCrossProduct invocation.
 * NOTE(review): generic type parameters (e.g. {@code HashSet<State>}) and two
 * loop headers below were stripped when this patch was mangled; restore from
 * version control.
 */
public class CrossProductImpl implements CrossProduct{

	// the product graph under construction
	public ReachabilityGraph crossPetriTrace = new ReachabilityGraph(
			"Cross product of trace and model");

	// model states to expand in the next round
	public HashSet tempPetriStateToVisitSet = new HashSet<>();

	// product states created in the current round
	public HashSet tempCurrentStateSet = new HashSet<>();

	// product-state label -> model-state label
	public HashMap combiToPetri = new HashMap();

	// "traceId,modelId" keys of product states already created
	public HashSet visitedStateSet = new HashSet<>();

	@Override
	public Object[] getCrossProduct(ReachabilityGraph rgFromPetri,
			State initStateFromPetri,
			AcceptStateSet finFromPetri,
			ReachabilityGraph rgFromTrace,
			State initStateFromTrace,
			AcceptStateSet finFromTrace) {
		HashSet petriStateLst = new HashSet<>();
		ArrayList traceStateLst = new ArrayList<>();

		ArrayList traceTransLst = new ArrayList<>();
		State currentState = rgFromTrace.getNode(initStateFromTrace.getIdentifier());
		traceStateLst.add(currentState);

		// get the initial state from trace
		// (walks the linear trace automaton, collecting its transitions in order;
		// the 'break' takes the first outgoing edge — assumes the trace graph is a chain)
		while (!currentState.isAccepting()) {
			for(Transition t: rgFromTrace.getOutEdges(currentState)){
				traceTransLst.add(t);
				State nextStateInTrace = t.getTarget();
				traceStateLst.add(nextStateInTrace);
				currentState = nextStateInTrace;
				break;
			}
		}
		// get the initial state from petri
		petriStateLst.add(initStateFromPetri);
		Transition t = null;

//		set up initial trace for the cross product
		Boolean canDoCross = false;

		Transition initTransInTrace = traceTransLst.get(0);

		State initStateForCross = null;

		// the product has an initial state only if the model can either fire an
		// invisible transition or match the first trace label
		for(Transition t1: rgFromPetri.getOutEdges(initStateFromPetri)) {

			if(t1.getVisibility() || t1.getLabel().equals(initTransInTrace.getLabel())){
				canDoCross = true;
				initStateForCross = new State(initStateFromTrace.getIdentifier()+","+initStateFromPetri.getIdentifier(), crossPetriTrace);
				crossPetriTrace.addState(initStateForCross,true,false);
				visitedStateSet.add(initStateFromTrace.getIdentifier() + "," + initStateFromPetri.getIdentifier());

				combiToPetri.put(initStateForCross.getLabel(), initStateFromPetri.getLabel());

				break;
			}
		}
		if(!canDoCross) {
			return null;
		}

//		add state to currentStateSet
		HashSet currentStateSet = new HashSet<>();
		currentStateSet.add(initStateForCross);

		// NOTE(review): the next loop header was garbled by the patch mangling —
		// it presumably read "for(int i=0; i<traceTransLst.size(); i++) {" with a
		// reset of tempCurrentStateSet; restore from version control.
		for(int i=0; i();
// get current transition from trace
			t = traceTransLst.get(i);

//			With current state from trace and corresponding transition, get all matching states from petri
			for(State currentStateFromPetri: petriStateLst) {
				getMatchingStates(
						rgFromPetri,
						rgFromTrace,
						t.getLabel(),
						currentStateFromTrace,
						currentStateFromPetri,
						0);
			}
			// update the current boundary to visit
			petriStateLst = tempPetriStateToVisitSet;

			// NOTE(review): garbled by the patch mangling — likely resets
			// tempPetriStateToVisitSet for all but the last iteration; restore
			// from version control.
			if (i();
		}

		Object[] obj = new Object[2];
		obj[0] = crossPetriTrace;
		obj[1] = combiToPetri;

		return obj;
	}

	// pinpoint whether current combi of currentStateFrom Trace and Petri exist.
//	If No, construct, otherwise, continue.
	public void getMatchingStates(ReachabilityGraph rgFromPetri,
			ReachabilityGraph rgFromTrace,
			String targetTransLabel,
			State currentStateFromTrace,
			State currentStateFromPetri,
			Integer visCount){
		for(Transition t: rgFromPetri.getOutEdges(currentStateFromPetri)) {

			// case 1: the model transition matches the current trace label
			if (visCount == 0 && t.getLabel().equals(targetTransLabel)) {
				State crossState = new State(currentStateFromTrace.getIdentifier() + "," + currentStateFromPetri.getIdentifier(),crossPetriTrace);
				// a product state is accepting when both components are terminal
				if(rgFromPetri.getOutEdges(currentStateFromPetri).size()==0 && rgFromTrace.getOutEdges(currentStateFromTrace).size()==0){
					crossState.setAccepting(true);
					crossPetriTrace.addState(crossState,false, true);
				}
				else {
					crossPetriTrace.addState(crossState);
				}

				visitedStateSet.add(currentStateFromTrace.getIdentifier() + "," + currentStateFromPetri.getIdentifier());

				combiToPetri.put(crossState.getLabel(), currentStateFromPetri.getLabel());

				tempCurrentStateSet.add(crossState);
				tempPetriStateToVisitSet.add(t.getTarget());

				// advance both trace and model (the 'break' takes the first trace edge)
				for(Transition t2: rgFromTrace.getOutEdges(currentStateFromTrace)) {
					getMatchingStatesForOneSubState(
							rgFromPetri,
							rgFromTrace,
							targetTransLabel,
							t2.getTarget(),
							t.getTarget(),
							crossState,
							t,
							visCount+1,
							0);
					break;
				}
			}
			// case 2: invisible model transition — advance the model only
			else if (t.getVisibility()) {
				State crossState = new State(currentStateFromTrace.getIdentifier() + "," + currentStateFromPetri.getIdentifier(),crossPetriTrace);
				if(rgFromPetri.getOutEdges(currentStateFromPetri).size()==0 && rgFromTrace.getOutEdges(currentStateFromTrace).size()==0){
					crossState.setAccepting(true);
					crossPetriTrace.addState(crossState,false, true);
				}
				else {
					crossPetriTrace.addState(crossState);
				}
				combiToPetri.put(crossState.getLabel(), currentStateFromPetri.getLabel());

//				should I go further?
				getMatchingStatesForOneSubState(
						rgFromPetri,
						rgFromTrace,
						targetTransLabel,
						currentStateFromTrace,
						t.getTarget(),
						crossState,
						t,
						visCount,
						0);
			}
		}
	}

	/**
	 * Recursive expansion of one product state; mirrors getMatchingStates but
	 * additionally records the incoming product transition (previousT from
	 * previousCrossState) and uses visitedStateSet to cut cycles
	 * (only for revisits, i.e. visCount2 > 0).
	 */
	public void getMatchingStatesForOneSubState(ReachabilityGraph rgFromPetri,
			ReachabilityGraph rgFromTrace,
			String targetTransLabel,
			State currentStateFromTrace,
			State currentStateFromPetri,
			State previousCrossState,
			Transition previousT,
			Integer visCount,
			Integer visCount2){

		if(visCount2 >0 && visitedStateSet.contains(currentStateFromTrace.getIdentifier() + "," + currentStateFromPetri.getIdentifier())) {
			return;
		}
		State currentCrossState = new State(currentStateFromTrace.getIdentifier() + "," + currentStateFromPetri.getIdentifier(), crossPetriTrace);
		if(rgFromPetri.getOutEdges(currentStateFromPetri).size()==0 && rgFromTrace.getOutEdges(currentStateFromTrace).size()==0){
			currentCrossState.setAccepting(true);
			crossPetriTrace.addState(currentCrossState, false, true);
		}
		else{
			crossPetriTrace.addState(currentCrossState);
		}
		visitedStateSet.add(currentStateFromTrace.getIdentifier() + "," + currentStateFromPetri.getIdentifier());

		combiToPetri.put(currentCrossState.getLabel(), currentStateFromPetri.getLabel());
		// connect the new product state to its predecessor
		crossPetriTrace.addTransition(previousCrossState, currentCrossState, previousT.getIdentifier(), previousT.getLabel(), previousT.getVisibility());
		for(Transition t: rgFromPetri.getOutEdges(currentStateFromPetri)) {
			if (visCount == 0 && t.getLabel().equals(targetTransLabel)) {
				tempCurrentStateSet.add(currentCrossState);
				combiToPetri.put(currentCrossState.getLabel(), currentStateFromPetri.getLabel());

				tempPetriStateToVisitSet.add(t.getTarget());

				for(Transition t2: rgFromTrace.getOutEdges(currentStateFromTrace)) {
					State nextStateInTrace = rgFromTrace.getNode(t2.getTarget().getIdentifier());
					getMatchingStatesForOneSubState(
							rgFromPetri,
							rgFromTrace,
							targetTransLabel,
							nextStateInTrace,
							t.getTarget(),
							currentCrossState,
							t,
							visCount+1,
							visCount2 +1);
					break;
				}
			}
			else if (t.getVisibility()) {
				// NOTE(review): this adds the same predecessor edge a second time
				// (already added above) — confirm addTransition deduplicates.
				crossPetriTrace.addTransition(previousCrossState, currentCrossState, previousT.getIdentifier(),previousT.getLabel(), previousT.getVisibility());

				if(visCount == 1) {
					tempPetriStateToVisitSet.add(t.getTarget());
					tempCurrentStateSet.add(currentCrossState);
					combiToPetri.put(currentCrossState.getLabel(), currentStateFromPetri.getLabel());
				}

				getMatchingStatesForOneSubState(
						rgFromPetri,
						rgFromTrace,
						targetTransLabel,
						currentStateFromTrace,
						t.getTarget(),
						currentCrossState,
						t,
						visCount,
						visCount2 +1);
			}
		}
	}


	@Override
	public String
getProbability(ReachabilityGraph r) { + return null; + } +} diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/ReachabilityGraph.java b/src/org/processmining/slpnminer/models/reachabilitygraph/ReachabilityGraph.java new file mode 100644 index 0000000..6058037 --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/ReachabilityGraph.java @@ -0,0 +1,286 @@ +package org.processmining.slpnminer.models.reachabilitygraph; + +import org.apache.commons.collections15.MapIterator; +import org.apache.commons.collections15.keyvalue.MultiKey; +import org.apache.commons.collections15.map.MultiKeyMap; +import org.processmining.models.graphbased.directed.*; +import org.processmining.slpnminer.models.AttributeMap; +import org.processmining.slpnminer.models.TransitionSystem; + +import javax.swing.*; +import java.util.*; + + +public class ReachabilityGraph extends AbstractDirectedGraph implements TransitionSystem { + + private final Map states = new LinkedHashMap(); + + @SuppressWarnings("rawtypes") + private final MultiKeyMap t = new MultiKeyMap(); //3 keys: and a value , but is set to generics + + private Map proxyMap; + + public ReachabilityGraph(String label) { + super(); + getAttributeMap().put(AttributeMap.LABEL, label); + getAttributeMap().put(AttributeMap.PREF_ORIENTATION, SwingConstants.NORTH); + + proxyMap = new HashMap(); + } + + @Override + protected synchronized Map cloneFrom( + DirectedGraph graph) { + assert (graph instanceof ReachabilityGraph); + Map mapping = new HashMap(); + + ReachabilityGraph ts = (ReachabilityGraph) graph; + for (Object identifier : ts.states.keySet()) { + addState(identifier); + mapping.put(ts.states.get(identifier), getNode(identifier)); + } +// for (Transition trans : getEdges()) { + for (Transition trans : ts.getEdges()) { + addTransition(trans.getSource().getIdentifier(), trans.getTarget().getIdentifier(), trans.getIdentifier(), + trans.getVisibility()); + mapping.put(trans, 
findTransition(trans.getSource().getIdentifier(), trans.getTarget().getIdentifier(), + trans.getIdentifier())); + } + return mapping; + } + + @Override + protected synchronized AbstractDirectedGraph getEmptyClone() { + return new ReachabilityGraph(getLabel()); + } + + /** + * Node/State Handling + * + */ + + public synchronized boolean addState(Object identifier) { + if (!states.containsKey(identifier)) { + State state = new State(identifier, this); + states.put(identifier, state); + graphElementAdded(state); + return true; + } else { + return false; + } + } + + public synchronized boolean addState(Object identifier, Boolean isInitial,Boolean isAccept) { + if (!states.containsKey(identifier)) { + State state = new State(identifier, this); + state.setInitiating(isInitial); + state.setAccepting(isAccept); + states.put(identifier, state); + graphElementAdded(state); + return true; + } else { + return false; + } + } + + public synchronized void removeNode(DirectedGraphNode node) { + if (node instanceof State) { + removeState(node); + } else { + assert (false); + } + } + + public synchronized Set getNodes() { + Set nodes = new HashSet(); + nodes.addAll(states.values()); + return nodes; + } + + public synchronized Collection getStates() { + return states.keySet(); + } + + + public synchronized Object removeState(Object state) { + if (state instanceof State) { + Object removed = states.get(((State) state).getIdentifier()); + states.remove(((State) state).getIdentifier()); + graphElementRemoved(removed); + return removed; + } + else return null; + } + + public synchronized State getNode(Object identifier) { + return states.get(getProxy(identifier)); + } + + public synchronized Boolean containsNode(Object identifier) { + return states.containsKey(getProxy(identifier)); + } + + /** + * Edge/Transition Handling + * + */ + + @SuppressWarnings("unchecked") + public synchronized boolean addTransition(Object fromState, Object toState, Object identifier, String label, Boolean 
isInvisible) { + State source = getNode(fromState); + State target = getNode(toState); + checkAddEdge(source, target); + + if (!t.containsKey(source, target, identifier)) { + Transition transition = new Transition(source, target, identifier, label, isInvisible); + t.put(source, target, identifier, transition); //Stores the Source State, Target State and Object identifier as keys of a Transition. + graphElementAdded(transition); + target.addPreTrans(transition); // add this previous transition to target state. + return true; + } else { + return false; + } + } + + @SuppressWarnings("unchecked") + public synchronized boolean addTransition(Object fromState, Object toState, Object identifier, + String label, Boolean isInvisible, Double weight) { + State source = getNode(fromState); + State target = getNode(toState); + checkAddEdge(source, target); + + if (!t.containsKey(source, target, identifier)) { + Transition transition = new Transition(source, target, identifier, label, weight, isInvisible); + t.put(source, target, identifier, transition); //Stores the Source State, Target State and Object identifier as keys of a Transition. + graphElementAdded(transition); + target.addPreTrans(transition); // add this previous transition to target state. + return true; + } else { + return false; + } + } + + public synchronized Transition addAndReturnTransition(Object fromState, Object toState, Object identifier, String label, Boolean isInvisible) { + State source = getNode(fromState); + State target = getNode(toState); + checkAddEdge(source, target); + + if (!t.containsKey(source, target, identifier)) { + Transition transition = new Transition(source, target, identifier, label, isInvisible); + t.put(source, target, identifier, transition); //Stores the Source State, Target State and Object identifier as keys of a Transition. + graphElementAdded(transition); + target.addPreTrans(transition); // add this previous transition to target state. 
+ return transition; + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + public synchronized Object removeTransition(Object fromState, Object toState, Object identifier) { + Object removed = t.get(getNode(fromState), getNode(toState), identifier); + t.remove(getNode(fromState), getNode(toState), identifier); + graphElementRemoved(removed); + return removed; + } + + @SuppressWarnings("unchecked") + @Override + public synchronized void removeEdge(@SuppressWarnings("rawtypes") DirectedGraphEdge edge) { + assert (edge instanceof Transition); + Transition transition = (Transition) edge; + t.remove(transition.getSource(),transition.getTarget(),transition.getIdentifier()); + } + + public synchronized Collection getTransitions() { + Set keys = new LinkedHashSet(); + for(Object trans : t.values()) + keys.add(((Transition) trans).getIdentifier()); + return keys; + } + + @SuppressWarnings("unchecked") + public synchronized Collection getTransitionObjects() { + return t.values(); + } + + @SuppressWarnings("unchecked") + public synchronized Set getEdges() { + Set result = new HashSet(); + result.addAll(t.values()); + return Collections.unmodifiableSet(result); + } + + @SuppressWarnings("rawtypes") + public Collection getEdges(Object identifier) { + Set collection = new LinkedHashSet(); + MapIterator iterator = t.mapIterator(); + while(iterator.hasNext()) + { + iterator.next(); + MultiKey key = (MultiKey) iterator.getKey(); + if(identifier.equals(key.getKey(2))){ + collection.add((Transition) t.get(key)); + } + } + return collection; + } + + @SuppressWarnings("unchecked") + public synchronized Transition findTransition(Object fromState, Object toState, Object identifier) { + return (Transition) t.get(getNode(fromState),getNode(toState),identifier); + } + + + public void putProxy(Object obj, Object proxy) { + proxyMap.put(obj, proxy); + } + + private Object getProxy(Object obj) { + while (proxyMap.containsKey(obj)) { + obj = proxyMap.get(obj); + } + return obj; 
+ } + + public void addProxyMap(ReachabilityGraph ts) { + for (Object key: ts.proxyMap.keySet()) { + proxyMap.put(key, ts.proxyMap.get(key)); + } + } + + public HashSet getReachableState(){ + + + HashSet reachableStateSet = new HashSet(); + + for (State state : getNodes()) { + if(state.isAccepting()) { + reachableStateSet.add(state); + reachableStateSet = getPrecedenceState(state, reachableStateSet); + } + } + return reachableStateSet; + } + + public HashSet getPrecedenceState(State state, HashSet reachableStateSet){ + for (Transition t: state.getPreTransSet()) { + if(reachableStateSet.contains(t.getSource())) { + continue; + } + reachableStateSet.add(t.getSource()); + + if(!t.getSource().isInitiating()) { + reachableStateSet = getPrecedenceState(t.getSource(), reachableStateSet); + } + } + return reachableStateSet; + } + + @Override + public boolean addTransition(Object fromState, Object toState, Object identifier, Boolean isInvisible) { + // TODO Auto-generated method stub + return false; + } + +} diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/StartStateSet.java b/src/org/processmining/slpnminer/models/reachabilitygraph/StartStateSet.java new file mode 100644 index 0000000..13532c7 --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/StartStateSet.java @@ -0,0 +1,12 @@ +package org.processmining.slpnminer.models.reachabilitygraph; + +import java.util.LinkedHashSet; + +public class StartStateSet extends LinkedHashSet { + + /** + * + */ + private static final long serialVersionUID = -3233656692840410245L; + +} diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/State.java b/src/org/processmining/slpnminer/models/reachabilitygraph/State.java new file mode 100644 index 0000000..872193f --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/State.java @@ -0,0 +1,118 @@ +package org.processmining.slpnminer.models.reachabilitygraph; + +import org.processmining.framework.util.Cast; 
+import org.processmining.framework.util.HTMLToString; +import org.processmining.models.graphbased.directed.AbstractDirectedGraphNode; +import org.processmining.models.shapes.Decorated; +import org.processmining.models.shapes.Ellipse; +import org.processmining.slpnminer.models.AttributeMap; + +import java.awt.*; +import java.awt.geom.Ellipse2D; +import java.util.HashSet; + +public class State extends AbstractDirectedGraphNode implements Decorated { + + /** + * this object identifies the state. + */ + private final Object identifier; + private final ReachabilityGraph graph; + + /** + * This accepting is stored for painting the state in the graph (see method + * decorate). + */ + private boolean accepting; + private boolean initiating; + private HashSet preTransSet; // store the previous transition that lead to this state + + public State(Object identifier, ReachabilityGraph graph) { + super(); + this.identifier = identifier; + this.graph = graph; + this.preTransSet = new HashSet(); + getAttributeMap().put(AttributeMap.SHAPE, new Ellipse()); + getAttributeMap().put(AttributeMap.SQUAREBB, false); + getAttributeMap().put(AttributeMap.RESIZABLE, true); + getAttributeMap().put(AttributeMap.SIZE, new Dimension(100, 60)); + getAttributeMap().put(AttributeMap.FILLCOLOR, Color.LIGHT_GRAY); + if (identifier instanceof HTMLToString) { + getAttributeMap().put(AttributeMap.LABEL, Cast.cast(identifier).toHTMLString(true)); + } else { + getAttributeMap().put(AttributeMap.LABEL, identifier.toString()); + } + getAttributeMap().put(AttributeMap.SHOWLABEL, false); + getAttributeMap().put(AttributeMap.AUTOSIZE, false); + setAccepting(false); + } + + public Object getIdentifier() { + return identifier; + } + + public boolean equals(Object o) { + return (o instanceof State ? 
identifier.equals(((State) o).identifier) : false); + } + + public int hashCode() { + return identifier.hashCode(); + } + + @Override + public ReachabilityGraph getGraph() { + return graph; + } + + public void setInitiating(boolean initiating) { + this.initiating = initiating; + } + + public void setAccepting(boolean accepting) { + this.accepting = accepting; + } + + public boolean isInitiating() { + return initiating; + } + + public boolean isAccepting() { + return accepting; + } + + public void setLabel(String label) { + getAttributeMap().put(AttributeMap.LABEL, label); + } + + public String getLabel() { + return (String) getAttributeMap().get(AttributeMap.LABEL); + } + + public void addPreTrans(Transition t) { + preTransSet.add(t); + } + + public HashSet getPreTransSet(){ + return preTransSet; + } + + /** + * If this state is an accepting state, then an extra line is painted in the + * GUI as the border of this state. + */ + public void decorate(Graphics2D g2d, double x, double y, double width, double height) { + if (isAccepting()) { + Float line = getAttributeMap().get(AttributeMap.LINEWIDTH, 1f); + int pointOffset = 3 + line.intValue() / 2; + int sizeOffset = (2 * pointOffset) + 1; + // Remember current stroke. + Stroke stroke = g2d.getStroke(); + // Use thin line for extra line. + g2d.setStroke(new BasicStroke(1f)); + // Draw extra line. + g2d.draw(new Ellipse2D.Double(x + pointOffset, y + pointOffset, width - sizeOffset, height - sizeOffset)); + // Reset remembered stroke. 
+ g2d.setStroke(stroke); + } + } +} diff --git a/src/org/processmining/slpnminer/models/reachabilitygraph/Transition.java b/src/org/processmining/slpnminer/models/reachabilitygraph/Transition.java new file mode 100644 index 0000000..7e7a25b --- /dev/null +++ b/src/org/processmining/slpnminer/models/reachabilitygraph/Transition.java @@ -0,0 +1,68 @@ +package org.processmining.slpnminer.models.reachabilitygraph; + +import org.processmining.slpnminer.models.AttributeMap; +import org.processmining.slpnminer.models.AttributeMap.ArrowType; +import org.processmining.models.graphbased.directed.AbstractDirectedGraphEdge; + + +public class Transition extends AbstractDirectedGraphEdge { + + /** + * This field identifies the transition, i.e. it is the object corresponding + * to this transition. + */ + private Object identifier; + private Double weight; + private Boolean visibility; + + + public Transition(State source, State target, Object identifier, String label, Boolean isInvisible) { + super(source, target); + this.identifier = identifier; + this.visibility = isInvisible; + getAttributeMap().put(AttributeMap.LABEL, label); + getAttributeMap().put(AttributeMap.EDGEEND, ArrowType.ARROWTYPE_SIMPLE); + getAttributeMap().put(AttributeMap.EDGEENDFILLED, true); + getAttributeMap().put(AttributeMap.SHOWLABEL, true); + } + + public Transition(State source, State target, Object identifier, String label, Double weight, Boolean isInvisible) { + super(source, target); + this.identifier = identifier; + this.weight = weight; + this.visibility = isInvisible; + getAttributeMap().put(AttributeMap.LABEL, label); + getAttributeMap().put(AttributeMap.EDGEEND, ArrowType.ARROWTYPE_SIMPLE); + getAttributeMap().put(AttributeMap.EDGEENDFILLED, true); + getAttributeMap().put(AttributeMap.SHOWLABEL, true); + } + + // The type-cast is safe, since the super.equals(o) tests for class + // equivalence + public boolean equals(Object o) { + return super.equals(o) && identifier.equals(((Transition) 
o).identifier); + } + + public Object getIdentifier() { + return identifier; + } + + public Boolean getVisibility() { + return visibility; + } + + public void setLabel(String label) { + getAttributeMap().put(AttributeMap.LABEL, label); + } + + public String getLabel() { + Object o = getAttributeMap().get(AttributeMap.LABEL); + return (o == null ? null : (String) o); + } + + public Double getWeight() { + // TODO Auto-generated method stub + return weight; + } + +} diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNet.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNet.java new file mode 100644 index 0000000..53e0753 --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNet.java @@ -0,0 +1,85 @@ +package org.processmining.slpnminer.models.slpn; + +public interface StochasticLabelledPetriNet { + /** + * + * @return the number of transitions. All transitions have indices starting + * at 0 and ending at the returned value (exclusive). + */ + public int getNumberOfTransitions(); + + /** + * + * @return the number of places. All places have indices starting at 0 and + * ending at the returned value (exclusive). + */ + public int getNumberOfPlaces(); + + /** + * Only call when it is certain that the transition is not a silent + * transition. + * + * @param transition + * @return the label of the transition. + */ + public String getTransitionLabel(int transition); + + /** + * + * @param transition + * @return whether the transition is a silent transition + */ + public boolean isTransitionSilent(int transition); + + /** + * + * @param place + * @return the number of tokens on this place in the initial marking. + */ + public int isInInitialMarking(int place); + + /** + * + * @param transition + * @return a list of places that have arcs to this transition. Transitions + * may appear multiple times. The caller must not change the + * returned array. 
+ */ + public int[] getInputPlaces(int transition); + + /** + * + * @param transition + * @return a list of places that have arcs from this transition. Transitions + * may appear multiple times. The caller must not change the + * returned array. + */ + public int[] getOutputPlaces(int transition); + + /** + * + * @param place + * @return a list of transitions that have arcs to this place. Places may + * appear multiple times. The caller must not change the returned + * array. + */ + public int[] getInputTransitions(int place); + + /** + * + * @param place + * @return a list of transitions that have arcs from this place. Places may + * appear multiple times. The caller must not change the returned + * array. + */ + public int[] getOutputTransitions(int place); + + /** + * + * @return an object that allows for a standardised interpretation of the + * language of the net. The returned object might not be thread safe + * and the implementer must ensure a new, fresh, object is returned + * for each call. + */ + public StochasticLabelledPetriNetSemantics getDefaultSemantics(); +} diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetEditable.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetEditable.java new file mode 100644 index 0000000..bb7b71f --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetEditable.java @@ -0,0 +1,75 @@ +package org.processmining.slpnminer.models.slpn; + +public interface StochasticLabelledPetriNetEditable extends StochasticLabelledPetriNet { + public void setTransitionLabel(int transition, String label); + + public void makeTransitionSilent(int transition); + + /** + * Add a labelled transition. + * + * @param label + * @param weight + * @return the index of the transition. + */ + public int addTransition(String label, double weight); + + /** + * Add a silent transition. + * + * @param weight + * @return the index of the transition. 
+ */ + public int addTransition(double weight); + + /** + * + * @return the index of the place. + */ + public int addPlace(); + + /** + * + * @param place + * @param cardinality + * May be negative, however ensure the final marking contains a + * positive number of tokens in each place. + */ + public void addPlaceToInitialMarking(int place, int cardinality); + + /** + * Adds a token to the given place in the final marking. + * + * @param place + */ + public void addPlaceToInitialMarking(int place); + + public void addPlaceTransitionArc(int place, int transition); + + /** + * + * @param place + * @param transition + * @param cardinality + * May be negative. + */ + public void addPlaceTransitionArc(int place, int transition, int cardinality); + + public void addTransitionPlaceArc(int transition, int place); + + /** + * + * @param transition + * @param place + * @param cardinality + * May be negative. + */ + public void addTransitionPlaceArc(int transition, int place, int cardinality); + + /** + * + * @param transition + * @param weight + */ + public void setTransitionWeight(int transition, double weight); +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetImpl.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetImpl.java new file mode 100644 index 0000000..c8380e0 --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetImpl.java @@ -0,0 +1,175 @@ +package org.processmining.slpnminer.models.slpn; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apache.commons.lang3.ArrayUtils; + +import gnu.trove.map.TIntIntMap; +import gnu.trove.map.hash.TIntIntHashMap; + +public abstract class StochasticLabelledPetriNetImpl implements StochasticLabelledPetriNetEditable { + + private ArrayList transitionLabels; + private TIntIntMap initialMarking; + private List inputPlaces = new ArrayList<>(); + private List 
outputPlaces = new ArrayList<>(); + private List inputTransitions = new ArrayList<>(); + private List outputTransitions = new ArrayList<>(); + + public StochasticLabelledPetriNetImpl() { + transitionLabels = new ArrayList<>(); + initialMarking = new TIntIntHashMap(10, 0.5f, -1, 0); + inputPlaces = new ArrayList<>(); + outputPlaces = new ArrayList<>(); + } + + @Override + public int getNumberOfTransitions() { + return transitionLabels.size(); + } + + @Override + public int getNumberOfPlaces() { + return inputTransitions.size(); + } + + @Override + public String getTransitionLabel(int transition) { + return transitionLabels.get(transition); + } + + @Override + public boolean isTransitionSilent(int transition) { + return transitionLabels.get(transition) == null; + } + + @Override + public int isInInitialMarking(int place) { + return initialMarking.get(place); + } + + @Override + public int[] getInputPlaces(int transition) { + return inputPlaces.get(transition); + } + + @Override + public int[] getOutputPlaces(int transition) { + return outputPlaces.get(transition); + } + + @Override + public int[] getInputTransitions(int place) { + return inputTransitions.get(place); + } + + @Override + public int[] getOutputTransitions(int place) { + return outputTransitions.get(place); + } + + @Override + public void setTransitionLabel(int transition, String label) { + transitionLabels.set(transition, label); + } + + @Override + public void makeTransitionSilent(int transition) { + transitionLabels.set(transition, null); + } + + @Override + public int addTransition(String label, double weight) { + inputPlaces.add(new int[0]); + outputPlaces.add(new int[0]); + transitionLabels.add(label); + return transitionLabels.size() - 1; + } + + @Override + public int addTransition(double weight) { + return addTransition(null, weight); + } + + @Override + public int addPlace() { + inputTransitions.add(new int[0]); + outputTransitions.add(new int[0]); + return inputTransitions.size() - 1; + } + + 
@Override + public void addPlaceToInitialMarking(int place, int cardinality) { + initialMarking.adjustOrPutValue(place, cardinality, cardinality); + } + + @Override + public void addPlaceToInitialMarking(int place) { + addPlaceToInitialMarking(place, 1); + } + + @Override + public void addPlaceTransitionArc(int place, int transition) { + addPlaceTransitionArc(place, transition, 1); + } + + @Override + public void addPlaceTransitionArc(int place, int transition, int cardinality) { + int[] xOutputTransitions = outputTransitions.get(place); + int[] xInputPlaces = inputPlaces.get(transition); + + if (cardinality > 0) { + //add arcs + xOutputTransitions = Arrays.copyOf(xOutputTransitions, xOutputTransitions.length + cardinality); + xInputPlaces = Arrays.copyOf(xInputPlaces, xInputPlaces.length + cardinality); + for (int i = 0; i < cardinality; i++) { + xOutputTransitions[i + xOutputTransitions.length - cardinality] = transition; + xInputPlaces[i + xInputPlaces.length - cardinality] = place; + } + } else { + //remove arcs or no action + for (int i = 0; i < cardinality; i++) { + xOutputTransitions = ArrayUtils.removeElement(xOutputTransitions, transition); + } + for (int i = 0; i < cardinality; i++) { + xInputPlaces = ArrayUtils.removeElement(xInputPlaces, place); + } + } + outputTransitions.set(place, xOutputTransitions); + inputPlaces.set(transition, xInputPlaces); + } + + @Override + public void addTransitionPlaceArc(int transition, int place) { + addTransitionPlaceArc(transition, place, 1); + } + + @Override + public void addTransitionPlaceArc(int transition, int place, int cardinality) { + int[] xOutputPlaces = outputPlaces.get(transition); + int[] xInputTransitions = inputTransitions.get(place); + + if (cardinality > 0) { + //add arcs + xOutputPlaces = Arrays.copyOf(xOutputPlaces, xOutputPlaces.length + cardinality); + xInputTransitions = Arrays.copyOf(xInputTransitions, xInputTransitions.length + cardinality); + for (int i = 0; i < cardinality; i++) { + 
xOutputPlaces[i + xOutputPlaces.length - cardinality] = place; + xInputTransitions[i + xInputTransitions.length - cardinality] = transition; + } + } else { + //remove arcs or no action + for (int i = 0; i < cardinality; i++) { + xOutputPlaces = ArrayUtils.removeElement(xOutputPlaces, place); + } + for (int i = 0; i < cardinality; i++) { + xInputTransitions = ArrayUtils.removeElement(xInputTransitions, transition); + } + } + inputTransitions.set(place, xInputTransitions); + outputPlaces.set(transition, xOutputPlaces); + } + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemantics.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemantics.java new file mode 100644 index 0000000..d4a36dc --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemantics.java @@ -0,0 +1,86 @@ +package org.processmining.slpnminer.models.slpn; + +import java.util.BitSet; + +/** + * Semantics may be implemented using a state machine, thus the underlying Petri + * net might not be able to be changed. + * + * @author sander + * + */ +public interface StochasticLabelledPetriNetSemantics extends Cloneable { + + public int getNumberOfTransitions(); + + /** + * (Re)set the semantics to the initial state. + */ + public void setInitialState(); + + /** + * Update the state to reflect execution of the transition. + * + * @param transition + */ + public void executeTransition(int transition); + + /** + * + * @param state + * @return an array of indices of the transitions that are enabled. May be + * changed and stored by the caller. + */ + public BitSet getEnabledTransitions(); + + /** + * + * @return whether the current state is a final state. + */ + public boolean isFinalState(); + + /** + * + * @return a copy of the current state. + */ + public byte[] getState(); + + /** + * Set a copy of the given state. 
+ * + * @param state + */ + public void setState(byte[] state); + + /** + * + * @param transition + * @return whether the given transition is silent + */ + public boolean isTransitionSilent(int transition); + + /** + * Only call when it is certain that the transition is not a silent + * transition. + * + * @param transition + * @return the label of the transition. + */ + public String getTransitionLabel(int transition); + + /** + * + * @param transition + * @return the weight of the transition. This might depend on the state. + */ + public double getTransitionWeight(int transition); + + /** + * + * @param enabledTransitions + * @return the sum of the weight of the enabled transitions + */ + public double getTotalWeightOfEnabledTransitions(); + + public StochasticLabelledPetriNetSemantics clone(); +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemanticsImpl.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemanticsImpl.java new file mode 100644 index 0000000..c4fc183 --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemanticsImpl.java @@ -0,0 +1,180 @@ +package org.processmining.slpnminer.models.slpn; + +import java.util.BitSet; + +import org.python.bouncycastle.util.Arrays; + +/** + * This semantics aims to avoid traversing all transitions. After construction, + * executing a transition will only consider the transitions whose enabledness + * may have changed. The only all-transition operation is BitSet.clear(). + * + * After construction, the semantics will only allocate local non-array + * variables. 
+ * + * @author sander + * + */ +public abstract class StochasticLabelledPetriNetSemanticsImpl implements StochasticLabelledPetriNetSemantics { + + private StochasticLabelledPetriNet net; + private byte[] state; + private byte[] cacheState; + protected BitSet enabledTransitions; + private BitSet cacheTransition; + protected int numberOfEnabledTransitions; + + public StochasticLabelledPetriNetSemanticsImpl(StochasticLabelledPetriNet net) { + this.net = net; + state = new byte[net.getNumberOfPlaces()]; + cacheState = new byte[net.getNumberOfPlaces()]; + cacheTransition = new BitSet(net.getNumberOfTransitions()); + enabledTransitions = new BitSet(net.getNumberOfTransitions()); + setInitialState(); + } + + @Override + public int getNumberOfTransitions() { + return net.getNumberOfTransitions(); + } + + @Override + public void setInitialState() { + enabledTransitions.clear(); + numberOfEnabledTransitions = 0; + for (int place = 0; place < net.getNumberOfPlaces(); place++) { + state[place] = (byte) net.isInInitialMarking(place); + } + computeEnabledTransitions(); + } + + @Override + public void executeTransition(int transition) { + int[] inSet = net.getInputPlaces(transition); + for (int place : inSet) { + if (state[place] == 0) { + throw new RuntimeException("non-existing token consumed"); + } + state[place]--; + + //update the enabled transitions; some transitions might be disabled by this execution + for (int transitionT : net.getOutputTransitions(place)) { + computeEnabledTransition(transitionT); + } + } + + int[] postSet = net.getOutputPlaces(transition); + for (int place : postSet) { + if (state[place] == Byte.MAX_VALUE) { + throw new RuntimeException("maximum number of tokens in a place exceeded"); + } + state[place]++; + + //update the enabled transitions; some transitions might be enabled by this execution + for (int transitionT : net.getOutputTransitions(place)) { + computeEnabledTransition(transitionT); + } + } + } + + private boolean 
computeEnabledTransition(int transition) { + //due to potential multiplicity of arcs, we have to keep track of how many tokens we would consume + System.arraycopy(state, 0, cacheState, 0, state.length); + + int[] inSet = net.getInputPlaces(transition); + for (int inPlace : inSet) { + if (cacheState[inPlace] == 0) { + if (enabledTransitions.get(transition)) { + enabledTransitions.set(transition, false); + numberOfEnabledTransitions--; + } + return false; + } else { + cacheState[inPlace]--; + } + } + + if (!enabledTransitions.get(transition)) { + enabledTransitions.set(transition, true); + numberOfEnabledTransitions++; + } + return true; + } + + private void computeEnabledTransitions() { + numberOfEnabledTransitions = 0; + for (int transition = 0; transition < net.getNumberOfTransitions(); transition++) { + computeEnabledTransition(transition); + } + } + + @Override + public BitSet getEnabledTransitions() { + return (BitSet) enabledTransitions.clone(); + } + + @Override + public boolean isFinalState() { + return numberOfEnabledTransitions == 0; + } + + @Override + public byte[] getState() { + return Arrays.clone(state); + } + + @Override + public void setState(byte[] newState) { + byte[] oldState = this.state; + this.state = Arrays.clone(newState); + + cacheTransition.clear(); + + //walk through all places that have changed, and update the transition enabledness + for (int place = 0; place < net.getNumberOfPlaces(); place++) { + if (oldState[place] != state[place]) { + for (int transition : net.getInputTransitions(place)) { + if (!cacheTransition.get(transition)) { + computeEnabledTransition(transition); + cacheTransition.set(transition); + } + } + for (int transition : net.getOutputTransitions(place)) { + if (!cacheTransition.get(transition)) { + computeEnabledTransition(transition); + cacheTransition.set(transition); + } + } + } + } + } + + @Override + public boolean isTransitionSilent(int transition) { + return net.isTransitionSilent(transition); + } + + @Override 
+ public String getTransitionLabel(int transition) { + return net.getTransitionLabel(transition); + } + + @Override + public StochasticLabelledPetriNetSemanticsImpl clone() { + StochasticLabelledPetriNetSemanticsImpl result; + try { + result = (StochasticLabelledPetriNetSemanticsImpl) super.clone(); + } catch (CloneNotSupportedException e) { + return null; + } + + result.net = net; + result.cacheState = Arrays.clone(cacheState); + result.cacheTransition = (BitSet) cacheTransition.clone(); + result.enabledTransitions = (BitSet) enabledTransitions.clone(); + result.numberOfEnabledTransitions = numberOfEnabledTransitions; + result.state = Arrays.clone(state); + + return result; + } +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemanticsSimpleWeightsImpl.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemanticsSimpleWeightsImpl.java new file mode 100644 index 0000000..bc7151c --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSemanticsSimpleWeightsImpl.java @@ -0,0 +1,46 @@ +package org.processmining.slpnminer.models.slpn; + +/** + * This semantics aims to avoid traversing all transitions. After construction, + * executing a transition will only consider the transitions whose enabledness + * may have changed. The only all-transition operation is BitSet.clear(). + * + * After construction, the semantics will only allocate local non-array + * variables. 
+ * + * @author sander + * + */ +public class StochasticLabelledPetriNetSemanticsSimpleWeightsImpl extends StochasticLabelledPetriNetSemanticsImpl { + + private StochasticLabelledPetriNetSimpleWeights net; + + public StochasticLabelledPetriNetSemanticsSimpleWeightsImpl(StochasticLabelledPetriNetSimpleWeights net) { + super(net); + this.net = net; + } + + @Override + public double getTransitionWeight(int transition) { + return net.getTransitionWeight(transition); + } + + @Override + public double getTotalWeightOfEnabledTransitions() { + double result = 0; + for (int transition = enabledTransitions.nextSetBit(0); transition >= 0; transition = enabledTransitions + .nextSetBit(transition + 1)) { + result += net.getTransitionWeight(transition); + } + return result; + } + + @Override + public StochasticLabelledPetriNetSemanticsSimpleWeightsImpl clone() { + StochasticLabelledPetriNetSemanticsSimpleWeightsImpl result = (StochasticLabelledPetriNetSemanticsSimpleWeightsImpl) super.clone(); + + result.net = net; + + return result; + } +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeights.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeights.java new file mode 100644 index 0000000..c7001b5 --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeights.java @@ -0,0 +1,12 @@ +package org.processmining.slpnminer.models.slpn; + +public interface StochasticLabelledPetriNetSimpleWeights extends StochasticLabelledPetriNet { + + /** + * + * @param transition + * @return the weight of the transition. 
+ */ + public double getTransitionWeight(int transition); + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeightsEditable.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeightsEditable.java new file mode 100644 index 0000000..2814438 --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeightsEditable.java @@ -0,0 +1,8 @@ +package org.processmining.slpnminer.models.slpn; + +public interface StochasticLabelledPetriNetSimpleWeightsEditable + extends StochasticLabelledPetriNetSimpleWeights, StochasticLabelledPetriNetEditable { + + public void setTransitionWeight(int transition, double weight); + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeightsImpl.java b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeightsImpl.java new file mode 100644 index 0000000..fc0b96f --- /dev/null +++ b/src/org/processmining/slpnminer/models/slpn/StochasticLabelledPetriNetSimpleWeightsImpl.java @@ -0,0 +1,36 @@ +package org.processmining.slpnminer.models.slpn; + +import gnu.trove.list.array.TDoubleArrayList; + +public class StochasticLabelledPetriNetSimpleWeightsImpl extends StochasticLabelledPetriNetImpl + implements StochasticLabelledPetriNetSimpleWeightsEditable { + + private TDoubleArrayList transitionWeights; + + public StochasticLabelledPetriNetSimpleWeightsImpl() { + transitionWeights = new TDoubleArrayList(); + } + + @Override + public void setTransitionWeight(int transition, double weight) { + transitionWeights.set(transition, weight); + } + + @Override + public int addTransition(String label, double weight) { + super.addTransition(label, weight); + transitionWeights.add(weight); + return transitionWeights.size() - 1; + } + + @Override + public double getTransitionWeight(int transition) { + return 
transitionWeights.get(transition); + } + + @Override + public StochasticLabelledPetriNetSemantics getDefaultSemantics() { + return new StochasticLabelledPetriNetSemanticsSimpleWeightsImpl(this); + } + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/parameters/GraphVisualiserParameters.java b/src/org/processmining/slpnminer/parameters/GraphVisualiserParameters.java new file mode 100644 index 0000000..9826410 --- /dev/null +++ b/src/org/processmining/slpnminer/parameters/GraphVisualiserParameters.java @@ -0,0 +1,38 @@ +package org.processmining.slpnminer.parameters; + +public class GraphVisualiserParameters { + + private String internalLabelFormat; + private String externalLabelFormat; + private String toolTipFormat; + + public GraphVisualiserParameters() { + setInternalLabelFormat("%l"); // Use JGraph internal label as internal label. + setExternalLabelFormat("%p"); // Use place label as external label. + setToolTipFormat("%t"); // Use tooltip as tooltip (seems not to show). 
+ } + + public String getInternalLabelFormat() { + return internalLabelFormat; + } + + public void setInternalLabelFormat(String internalLabelFormat) { + this.internalLabelFormat = internalLabelFormat; + } + + public String getExternalLabelFormat() { + return externalLabelFormat; + } + + public void setExternalLabelFormat(String externalLabelFormat) { + this.externalLabelFormat = externalLabelFormat; + } + + public String getToolTipFormat() { + return toolTipFormat; + } + + public void setToolTipFormat(String toolTipFormat) { + this.toolTipFormat = toolTipFormat; + } +} diff --git a/src/org/processmining/slpnminer/plugins/CrossProdGenerator.java b/src/org/processmining/slpnminer/plugins/CrossProdGenerator.java new file mode 100644 index 0000000..7ca055a --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/CrossProdGenerator.java @@ -0,0 +1,524 @@ +package org.processmining.slpnminer.plugins; + +import java.text.ParseException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.connections.ConnectionCannotBeObtained; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.framework.plugin.events.Logger; +import org.processmining.models.connections.petrinets.behavioral.BehavioralAnalysisInformationConnection; +import org.processmining.models.connections.petrinets.behavioral.BoundednessInfoConnection; +import 
org.processmining.models.connections.petrinets.behavioral.InitialMarkingConnection; +import org.processmining.models.connections.petrinets.behavioral.ReachabilitySetConnection; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.analysis.NetAnalysisInformation; +import org.processmining.models.graphbased.directed.petrinet.analysis.ReachabilitySet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.petrinet.impl.PetrinetImpl; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.CTMarking; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetSemantics; +import org.processmining.models.semantics.petrinet.impl.PetrinetSemanticsFactory; +import org.processmining.slpnminer.connections.DeadMarkingConnection; +import org.processmining.slpnminer.connections.ReachabilityConnection; +import org.processmining.slpnminer.connections.StateSpaceConnection; +import org.processmining.slpnminer.helpers.StrToExp; +import org.processmining.slpnminer.models.BoundednessAnalyzer; +import org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.CoverabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.CrossProductImpl; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.State; + + +@Plugin(name = "Discover reachability graph", + 
returnLabels = { "stochastic labelled Petri net" }, + returnTypes = { ReachabilityGraph.class}, + parameterLabels = { "net", "log"}) +public class CrossProdGenerator { + + private static final int MAXSTATES = 25000; + + private static int modelTransitionNum; + + private static int logSize; + + private static HashMap strToDouble = new HashMap<>(); + + private static HashMap traceVariantMap; + + private static HashMap isolatedVariantMap = new HashMap(); + + private static Boolean mapFlag = true; + + + @UITopiaVariant(affiliation = UITopiaVariant.EHV, author = "Tian Li", email = "t.li@bpm.rwth-aachen.de", pack = "WeightEstimation") + @PluginVariant(requiredParameterLabels = {0, 1}) + public ReachabilityGraph calculateTS(PluginContext context, XLog log, Petrinet net) throws ConnectionCannotBeObtained, ParseException { + Marking initialMarking = guessInitialMarking(net); + logSize = log.size(); + + return calculateTS(context, log, net, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class)); + } + + public static Marking guessInitialMarking(Petrinet net) { + Marking result = new Marking(); + for (Place p : net.getPlaces()) { + if (net.getInEdges(p).isEmpty()) { + result.add(p); + } + } + return result; + } + + public ReachabilityGraph calculateTS(PluginContext context, XLog log, Petrinet net, Marking state, PetrinetSemantics semantics) + throws ConnectionCannotBeObtained, ParseException { + semantics.initialize(net.getTransitions(), new Marking(state)); + return buildAndConnect(context, log, net, state, semantics, null); + } + + private ReachabilityGraph buildAndConnect(PluginContext context, + XLog log, + PetrinetGraph net, + Marking initial, + Semantics semantics, CoverabilityGraph coverabilityGraph) + throws ConnectionCannotBeObtained, ParseException { + context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial); + + ReachabilityGraph ts = null; + + + NetAnalysisInformation.BOUNDEDNESS info = 
null; + + try { + BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection( + BoundednessInfoConnection.class, context, net, initial, semantics); + info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION); + } catch (Exception e) { + // No connections available + } + + if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. + return null; + } + + if (coverabilityGraph != null) {// && !bounded) { + if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. + return null; + } + // clone the graph and return + Map mapping = new HashMap(); + + ts = new ReachabilityGraph("StateSpace of " + net.getLabel()); + for (Object o : coverabilityGraph.getStates()) { + CTMarking m = (CTMarking) o; + Marking tsm = new Marking(m); + ts.addState(tsm); + mapping.put(m, tsm); + } + for (org.processmining.slpnminer.models.reachabilitygraph.Transition e : coverabilityGraph + .getEdges()) { + Marking source = mapping.get(e.getSource().getIdentifier()); + Marking target = mapping.get(e.getTarget().getIdentifier()); + ts.addTransition(source, target, e.getIdentifier(),e.getVisibility()); + } + } + + StartStateSet startStates = new StartStateSet(); + startStates.add(initial); + + if (ts == null) { + ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES); + } + if (ts == null) { + // Problem with the reachability graph. 
+ context.getFutureResult(0).cancel(true); + return null; + } + + AcceptStateSet acceptingStates = new AcceptStateSet(); + for (State state : ts.getNodes()) { + if (ts.getOutEdges(state).isEmpty()) { + acceptingStates.add(state.getIdentifier()); + } + } + + Marking[] markings = ts.getStates().toArray(new Marking[0]); + ReachabilitySet rs = new ReachabilitySet(markings); + + context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set")); + context.addConnection(new StateSpaceConnection(net, initial, ts, semantics)); + context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates)); + context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics)); + + context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size() + + " transitions.", Logger.MessageLevel.DEBUG); + +// get all the transitions + int transIdx = 0; + HashMap tm = new HashMap<>(); + HashMap tmap = new HashMap<>(); + + + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getEdges()){ + tm.put(t.getIdentifier(), "t" + transIdx); + tmap.put(t.getLabel(), "t" + transIdx); + transIdx ++; + } + +// get the probability of transition in the reachability graph of Petri net + HashMap> tProbMap = getProbabilityFromPetriRG(ts, tm); + + + Object[] traceObj; + traceVariantMap = new HashMap(); + + System.out.println("1. 
Start log iteration"); + + int j = 0; + for (XTrace trace : log) { + Petrinet traceNet = new PetrinetImpl("trace net"); + Marking initialMarking = new Marking(); + String traceStr = ""; + + if (!trace.isEmpty()) { +// get trace variant + XAttribute tempEv; + for(XEvent e: trace) { + tempEv = e.getAttributes().get("concept:name"); + traceStr = traceStr.concat(tempEv.toString()+" "); + } + + if(traceVariantMap.containsKey(traceStr)) { + Double count = traceVariantMap.get(traceStr) + 1; + traceVariantMap.put(traceStr,count); + continue; + } + else { + traceVariantMap.put(traceStr, 1.0); + } + Place currentPlace = traceNet.addPlace("p" + String.valueOf(0)); + initialMarking.add(currentPlace); + for (int i = 0; i < trace.size(); i++) { + Place nextPlace = traceNet.addPlace("p" + String.valueOf(i + 1)); + Transition t = traceNet.addTransition(String.valueOf(trace.get(i).getAttributes().get("concept:name"))); + t.setInvisible(false); + traceNet.addArc(currentPlace, t); + traceNet.addArc(t, nextPlace); + currentPlace = nextPlace; + } + } + + traceObj = calculateTSForTrace(context, traceNet, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class)); + State initStateForPetri = new State(initial,ts); + CrossProductImpl CP = new CrossProductImpl(); + + + Object[] obj = CP.getCrossProduct( + ts, + initStateForPetri, + acceptingStates, + (ReachabilityGraph) traceObj[0], + (State) traceObj[2], + (AcceptStateSet) traceObj[3]); + + if(obj!=null) { + ReachabilityGraph another_ts = (ReachabilityGraph)obj[0]; + return another_ts; + } + } + System.out.println("It is null"); + return null; + } + + + private static String getTransformedString(String expression) { + // Define the pattern for detecting variables with exponentiation + Pattern pattern = Pattern.compile("([a-zA-Z]\\d*)\\^\\d+"); + + // Create a matcher with the given expression + Matcher matcher = pattern.matcher(expression); + + String expression2 = expression; + // Find and print all matches + 
while (matcher.find()) { +// System.out.println("Variable with exponentiation: " + transformPowerExpression(matcher.group())); + expression2 = expression2.replace(matcher.group(), transformPowerExpression(matcher.group())); + } + System.out.println("\n the expression after transformation: "+expression2); + + return expression2; + } + + private static String transformPowerExpression(String powerExpression) { + // Split the expression into variable and exponent parts + String[] parts = powerExpression.split("\\^"); + + // Extract variable and exponent + String variable = parts[0]; + int exponent = Integer.parseInt(parts[1]); + + // Create the transformed expression + StringBuilder transformedExpression = new StringBuilder(); + for (int i = 0; i < exponent; i++) { + transformedExpression.append(variable); + if (i < exponent - 1) { + transformedExpression.append("*"); + } + } + + return transformedExpression.toString(); + } + + private HashMap> getProbabilityFromPetriRG( + ReachabilityGraph ts, + HashMap tm) { + + HashMap> tProbMap = new HashMap>(); + + // TODO Auto-generated method stub + for(State state: ts.getNodes()) { + // for each outgoing edges, get the probability + String result = "("; + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + result = result.concat(tm.get(t.getIdentifier())+"+"); + } + result = result.substring(0, result.length()-1); + result = result.concat(")"); + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + if(tProbMap.containsKey(tm.get(t.getIdentifier()))) { + tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result); + + } + else { + HashMap tempMap = new HashMap(); + tempMap.put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result); + tProbMap.put(tm.get(t.getIdentifier()), tempMap); + } + } + + + } + +// + System.out.println("Get all probability map"); + for(String key1: tProbMap.keySet()){ + 
for(Object obj1: tProbMap.get(key1).keySet()) { + System.out.println("key: "+ key1 +" state: "+ obj1+" trans prob: "+tProbMap.get(key1).get(obj1)); + } + } + + return tProbMap; + } + + public Object[] calculateTSForTrace(PluginContext context, Petrinet net, Marking state, PetrinetSemantics semantics) + throws ConnectionCannotBeObtained { + semantics.initialize(net.getTransitions(), new Marking(state)); + return buildAndConnectForTrace(context, net, state, semantics, null); + } + + + private Object[] buildAndConnectForTrace(PluginContext context, PetrinetGraph net, Marking initial, + Semantics semantics, CoverabilityGraph coverabilityGraph) + throws ConnectionCannotBeObtained { + context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial); + ReachabilityGraph ts = null; + NetAnalysisInformation.BOUNDEDNESS info = null; + + try { + BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection( + BoundednessInfoConnection.class, context, net, initial, semantics); + info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION); + } catch (Exception e) { + // No connections available + } + + if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. + return null; + } + + if (coverabilityGraph != null) {// && !bounded) { + if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. 
+ return null; + } + // clone the graph and return + Map mapping = new HashMap(); + + ts = new ReachabilityGraph("StateSpace of " + net.getLabel()); + for (Object o : coverabilityGraph.getStates()) { + CTMarking m = (CTMarking) o; + Marking tsm = new Marking(m); + ts.addState(tsm); + mapping.put(m, tsm); + } + for (org.processmining.slpnminer.models.reachabilitygraph.Transition e : coverabilityGraph.getEdges()) { + Marking source = mapping.get(e.getSource().getIdentifier()); + Marking target = mapping.get(e.getTarget().getIdentifier()); + ts.addTransition(source, target, e.getIdentifier(), e.getVisibility()); + } + + } + + StartStateSet startStates = new StartStateSet(); + startStates.add(initial); + + if (ts == null) { + ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES); + } + if (ts == null) { + // Problem with the reachability graph. + context.getFutureResult(0).cancel(true); + return null; + } + + AcceptStateSet acceptingStates = new AcceptStateSet(); + for (State state : ts.getNodes()) { + if (ts.getOutEdges(state).isEmpty()) { + state.setAccepting(true); + acceptingStates.add(state.getIdentifier()); + } + } + + Marking[] markings = ts.getStates().toArray(new Marking[0]); + ReachabilitySet rs = new ReachabilitySet(markings); + + context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set")); + context.addConnection(new StateSpaceConnection(net, initial, ts, semantics)); + context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates)); + context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics)); + +// context.getFutureResult(0).setLabel("Reachability graph of " + net.getLabel()); +// context.getFutureResult(1).setLabel("Reachability set of " + net.getLabel()); +// context.getFutureResult(2).setLabel("Initial states of " + ts.getLabel()); +// context.getFutureResult(3).setLabel("Accepting states of " + ts.getLabel()); + + context.log("Statespace 
size: " + ts.getStates().size() + " states and " + ts.getEdges().size() + + " transitions.", Logger.MessageLevel.DEBUG); + State initStateForTrace = new State(initial,ts); + + return new Object[]{ts, rs, initStateForTrace, acceptingStates}; + } + + private ReachabilityGraph doBreadthFirst(PluginContext context, String label, Marking state, + Semantics semantics, int max) { + ReachabilityGraph ts = new ReachabilityGraph("StateSpace of " + label); + ts.addState(state); + Queue newStates = new LinkedList(); + newStates.add(state); + do { + newStates.addAll(extend(ts, newStates.poll(), semantics, context)); + } while (!newStates.isEmpty() && (ts.getStates().size() < max)); + if (!newStates.isEmpty()) { + // This net has been shows to be unbounded on this marking + context.log("The behaviour of the given net is has over " + max + " states. Aborting...", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + return null; + } + return ts; + + } + + + private Set extend(ReachabilityGraph ts, + Marking state, + Semantics semantics, + PluginContext context) { + Set newStates = new HashSet(); + semantics.setCurrentState(state); + for (Transition t : semantics.getExecutableTransitions()) { + semantics.setCurrentState(state); + try { + /* + * [HV] The local variable info is never read + * ExecutionInformation info = + */ + semantics.executeExecutableTransition(t); + // context.log(info.toString(), MessageLevel.DEBUG); + } catch (IllegalTransitionException e) { + context.log(e); + assert (false); + } + Marking newState = semantics.getCurrentState(); + + if (ts.addState(newState)) { + newStates.add(newState); + int size = ts.getEdges().size(); + if (size % 1000 == 0) { + context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size() + + " transitions.", Logger.MessageLevel.DEBUG); + } + } + ts.addTransition(state, newState, t, t.getLabel(), t.isInvisible()); + + semantics.setCurrentState(state); + } + return newStates; + } + + 
public static double getUEMSC(double[] t){ + for(int i=0;i < modelTransitionNum; i++) { + strToDouble.put("t"+String.valueOf(i), t[i]); + } + + if (mapFlag) { + for(int i=0;i < modelTransitionNum; i++) { + strToDouble.put("t"+String.valueOf(i), t[i]); + } + for(String s:traceVariantMap.keySet()) { + if (!isolatedVariantMap.get(s).equals("0")) { + System.out.println("find trace: "+s+" with freq: "+(double)traceVariantMap.get(s)/logSize); + } + } + mapFlag = false; + } + + double val = 1; + + for(String s:traceVariantMap.keySet()) { + if (!isolatedVariantMap.get(s).equals("0")) { + val -= Math.max(StrToExp.converStringToMathExp(isolatedVariantMap.get(s),strToDouble) - (double)traceVariantMap.get(s)/logSize, 0); + } + } + return val; + } +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/plugins/ErDiscovery.java b/src/org/processmining/slpnminer/plugins/ErDiscovery.java new file mode 100644 index 0000000..431fa9d --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/ErDiscovery.java @@ -0,0 +1,691 @@ +package org.processmining.slpnminer.plugins; + +import gnu.trove.map.TObjectIntMap; +import gnu.trove.map.hash.TObjectIntHashMap; +import org.apache.commons.math4.legacy.analysis.MultivariateFunction; +import org.apache.commons.math4.legacy.optim.InitialGuess; +import org.apache.commons.math4.legacy.optim.MaxEval; +import org.apache.commons.math4.legacy.optim.SimpleBounds; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.ObjectiveFunction; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.Sigma; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.BOBYQAOptimizer; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.CMAESOptimizer; +import org.apache.commons.rng.UniformRandomProvider; +import 
org.apache.commons.rng.simple.RandomSource; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; + +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.connections.ConnectionCannotBeObtained; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.framework.plugin.events.Logger; +import org.processmining.models.connections.petrinets.behavioral.*; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.analysis.NetAnalysisInformation; +import org.processmining.models.graphbased.directed.petrinet.analysis.ReachabilitySet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.petrinet.impl.PetrinetImpl; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.CTMarking; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetSemantics; +import org.processmining.models.semantics.petrinet.impl.PetrinetSemanticsFactory; +import org.processmining.slpnminer.connections.ReachabilityConnection; +import org.processmining.slpnminer.models.BoundednessAnalyzer; +import 
org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.CoverabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.CrossProductImpl; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.State; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsEditable; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsImpl; +import org.processmining.slpnminer.connections.DeadMarkingConnection; +import org.processmining.slpnminer.connections.StateSpaceConnection; +import org.processmining.slpnminer.helpers.EquationSystems; +import org.processmining.slpnminer.helpers.IsolateVariable; +import org.processmining.slpnminer.helpers.StrToExp; + +import java.text.ParseException; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +@Plugin(name = "Discover SLPN with Entropic Relevance", + returnLabels = { "stochastic labelled Petri net" }, + returnTypes = { StochasticLabelledPetriNetSimpleWeightsEditable.class}, + parameterLabels = { "net", "log"}) +public class ErDiscovery { + + private static final int MAXSTATES = 25000; + + private static int modelTransitionNum; + + private static int logSize; + + private static HashMap strToDouble = new HashMap<>(); + + private static HashMap traceVariantMap; + + private static HashMap isolatedVariantMap = new HashMap(); + + private static Boolean mapFlag = true; + + private static Collection netTransitionCollection; + + private static HashMap tTsIdToPetriIdMap = new 
HashMap(); + + + + @UITopiaVariant(affiliation = UITopiaVariant.EHV, author = "Tian Li", email = "t.li@bpm.rwth-aachen.de", pack = "WeightEstimation") + @PluginVariant(requiredParameterLabels = {0, 1}) + public StochasticLabelledPetriNetSimpleWeightsEditable calculateTS(PluginContext context, XLog log, Petrinet net) throws ConnectionCannotBeObtained, ParseException { + + + Marking initialMarking = guessInitialMarking(net); + logSize = log.size(); + modelTransitionNum = net.getTransitions().size(); + netTransitionCollection = net.getTransitions(); + return calculateTS(context, log, net, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class)); + } + + public static Marking guessInitialMarking(Petrinet net) { + Marking result = new Marking(); + for (Place p : net.getPlaces()) { + if (net.getInEdges(p).isEmpty()) { + result.add(p); + } + } + return result; + } + + public StochasticLabelledPetriNetSimpleWeightsEditable calculateTS(PluginContext context, XLog log, Petrinet net, Marking state, PetrinetSemantics semantics) + throws ConnectionCannotBeObtained, ParseException { + semantics.initialize(net.getTransitions(), new Marking(state)); + return buildAndConnect(context, log, net, state, semantics, null); + } + + private StochasticLabelledPetriNetSimpleWeightsEditable buildAndConnect(PluginContext context, + XLog log, + PetrinetGraph net, + Marking initial, + Semantics semantics, CoverabilityGraph coverabilityGraph) + throws ConnectionCannotBeObtained, ParseException { + context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial); + + ReachabilityGraph ts = null; + + + NetAnalysisInformation.BOUNDEDNESS info = null; + + try { + BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection( + BoundednessInfoConnection.class, context, net, initial, semantics); + info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION); + } catch 
(Exception e) {
            // No connections available
        }

        // A known-unbounded net has an infinite reachability graph: abort the plugin.
        if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) {
            // This net has been shows to be unbounded on this marking
            context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
                    Logger.MessageLevel.ERROR);
            context.getFutureResult(0).cancel(true);
            // unreachable statement, but safe.
            return null;
        }

        if (coverabilityGraph != null) {// && !bounded) {
            if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) {
                // This net has been shows to be unbounded on this marking
                context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
                        Logger.MessageLevel.ERROR);
                context.getFutureResult(0).cancel(true);
                // unreachable statement, but safe.
                return null;
            }
            // clone the graph and return
            // NOTE(review): raw Map — presumably Map<CTMarking, Marking>; confirm.
            Map mapping = new HashMap();

            ts = new ReachabilityGraph("StateSpace of " + net.getLabel());
            for (Object o : coverabilityGraph.getStates()) {
                CTMarking m = (CTMarking) o;
                Marking tsm = new Marking(m);
                ts.addState(tsm);
                mapping.put(m, tsm);
            }
            for (org.processmining.slpnminer.models.reachabilitygraph.Transition e : coverabilityGraph
                    .getEdges()) {
                Marking source = mapping.get(e.getSource().getIdentifier());
                Marking target = mapping.get(e.getTarget().getIdentifier());
                ts.addTransition(source, target, e.getIdentifier(),e.getVisibility());
            }
        }

        StartStateSet startStates = new StartStateSet();
        startStates.add(initial);

        // No coverability graph supplied: explore the state space breadth-first.
        if (ts == null) {
            ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES);
        }
        if (ts == null) {
            // Problem with the reachability graph.
            context.getFutureResult(0).cancel(true);
            return null;
        }

        // States without outgoing edges are deadlocks, treated as accepting states.
        AcceptStateSet acceptingStates = new AcceptStateSet();
        for (State state : ts.getNodes()) {
            if (ts.getOutEdges(state).isEmpty()) {
                acceptingStates.add(state.getIdentifier());
            }
        }

        Marking[] markings = ts.getStates().toArray(new Marking[0]);
        ReachabilitySet rs = new ReachabilitySet(markings);

        // Publish the computed artefacts to the ProM connection manager.
        context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set"));
        context.addConnection(new StateSpaceConnection(net, initial, ts, semantics));
        context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates));
        context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics));

        context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size()
                + " transitions.", Logger.MessageLevel.DEBUG);

//      get all the transitions
        // tm: edge identifier (object) -> symbolic name "tN"; tmap: identifier string -> "tN".
        // The same Petri-net transition labels all its edges in the reachability graph,
        // so both maps collapse duplicates onto one symbolic name.
        HashMap tm = new HashMap<>();
        HashMap tmap = new HashMap<>();

        int transIdx = 0;
        for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getEdges()){

//          System.out.println("iterate transition in ts: " + t.getIdentifier() + " " + t.getLabel());
            if(tmap.containsKey(t.getIdentifier().toString())) {
                tm.put(t.getIdentifier(), tmap.get(t.getIdentifier().toString()));
            }
            else {
                tm.put(t.getIdentifier(), "t" + transIdx);
                tmap.put(t.getIdentifier().toString(), "t" + transIdx);
                transIdx ++;
            }
        }
//      get the probability of transition in the reachability graph of Petri net
        // NOTE(review): generic parameters on the two casts below were stripped by the
        // extraction tooling ("HashMap>"); restore from the original source.
        Object[] obj2 = getProbabilityFromPetriRG(ts, tm);
        HashMap> tProbMap = (HashMap>) obj2[0];
        HashMap x2ProbMap = (HashMap) obj2[1];


        Object[] traceObj;
        traceVariantMap = new HashMap();

        Integer j = 0;
        // Per trace: build a linear "trace net", cross it with the model state space,
        // and solve for the probability expression of that trace variant.
        for (XTrace trace : log) {
            Petrinet traceNet = new PetrinetImpl("trace net");
            Marking initialMarking = new Marking();
            String traceStr = "";

            if (!trace.isEmpty()) {
//              get trace variant
                XAttribute tempEv;
                for(XEvent e: trace) {
                    tempEv = e.getAttributes().get("concept:name");
                    traceStr = traceStr.concat(tempEv.toString()+" ");
                }

                // Already-seen variant: just bump its count and skip the expensive work.
                if(traceVariantMap.containsKey(traceStr)) {
                    Double count = traceVariantMap.get(traceStr) + 1;
                    traceVariantMap.put(traceStr,count);
                    continue;
                }
                else {
                    traceVariantMap.put(traceStr, 1.0);
                }
                // Build a strictly sequential Petri net p0 -> e1 -> p1 -> ... for the trace.
                Place currentPlace = traceNet.addPlace("p" + String.valueOf(0));
                initialMarking.add(currentPlace);
                for (int i = 0; i < trace.size(); i++) {
                    Place nextPlace = traceNet.addPlace("p" + String.valueOf(i + 1));
                    Transition t = traceNet.addTransition(String.valueOf(trace.get(i).getAttributes().get("concept:name")));
                    t.setInvisible(false);
                    traceNet.addArc(currentPlace, t);
                    traceNet.addArc(t, nextPlace);
                    currentPlace = nextPlace;
                }
            }


            traceObj = calculateTSForTrace(context, traceNet, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class));
            State initStateForPetri = new State(initial,ts);
            CrossProductImpl CP = new CrossProductImpl();


            // Synchronous product of the model state space and the trace state space.
            Object[] obj = CP.getCrossProduct(
                    ts,
                    initStateForPetri,
                    acceptingStates,
                    (ReachabilityGraph) traceObj[0],
                    (State) traceObj[2],
                    (AcceptStateSet) traceObj[3]);

            if(obj!=null) {
                ReachabilityGraph another_ts = (ReachabilityGraph)obj[0];
                HashSet reachableState = another_ts.getReachableState();

                Boolean noAcceptState = true;
                for(State s: another_ts.getNodes()) {
                    if(s.isAccepting()) {
                        noAcceptState = false;
                    }
                }
                // No accepting state: the model cannot replay this variant; probability 0.
                if(noAcceptState) {
                    isolatedVariantMap.put(traceStr,"0");
                    j++;
                    continue;
                }

                @SuppressWarnings("unchecked")
                HashMap combiToPetri = (HashMap)obj[1];

                // Build the equation system whose solution is this variant's probability.
                EquationSystems eq = new EquationSystems();
                String assis1 = "Solve";
                Object[] resultObj = eq.getEqStr(another_ts, tm, tProbMap, combiToPetri, reachableState);
                String eqSys = (String) resultObj[0];
                @SuppressWarnings("unchecked")
                HashMap replTransProb = (HashMap)resultObj[1];

                try {
                    ExecutorService executorService = Executors.newSingleThreadExecutor();
                    String
isolatedVar = null;
                    // Solve the equation system on a worker thread with a hard 30s budget.
                    Future future = executorService.submit(() -> IsolateVariable.getIsolatedVar(assis1+eqSys));

                    try {
                        isolatedVar = future.get(30, TimeUnit.SECONDS);
                    } catch (TimeoutException e) {
                        System.out.println("Timeout computing the probability");
                        isolatedVariantMap.put(traceStr,"0");
                    } catch(Exception e){
                        // NOTE(review): swallowed exception — solver failures are silently
                        // treated like "no result"; consider at least logging e.
                    }finally {
                        executorService.shutdown();
                    }

                    if (isolatedVar==null) {
                        j++;
                        isolatedVariantMap.put(traceStr,"0");
                        continue;
                    }
//                  isolatedVar = isolatedVar.replaceAll("\\s+","");
                    // Substitute solver variables (x0, x1, ...) by transition-probability terms.
                    String transformedIsolatedVar = replaceExpressions(isolatedVar, replTransProb);
//                  System.out.println("the isolatated var before: "+isolatedVar);
//                  System.out.println("the "+String.valueOf(j+1)+"-th trace: " + traceStr);
//                  System.out.println("the isolatated var after: "+transformedIsolatedVar);
                    isolatedVariantMap.put(traceStr,transformedIsolatedVar);
                }
                catch(Exception e){
                    // NOTE(review): swallowed exception (see note above in this block).
                }
            }
            else {
                isolatedVariantMap.put(traceStr,"0");
            }
            j++;
        }

        // Objective: entropic relevance as a function of the weight vector x.
        mapFlag = true;
        MultivariateFunction fER = new MultivariateFunction() {
            public double value(double[] x) {
                return getER(x);
            }
        };

        // Box-constrained CMA-ES over one weight per model transition, in (0.0001, 1].
        double[] lowerBound = new double[modelTransitionNum];
        double[] upperBound = new double[modelTransitionNum];
        double[] initGuess = new double[modelTransitionNum];
        double[] sigma = new double[modelTransitionNum];
        Arrays.fill(lowerBound, 0.0001);
        Arrays.fill(upperBound, 1.0000);
        Arrays.fill(initGuess, 1);
        Arrays.fill(sigma, 0.1);
        UniformRandomProvider rngG = RandomSource.MT_64.create();
        CMAESOptimizer optimizer = new CMAESOptimizer(
                1000000,
                0,
                true,
                modelTransitionNum,
                100,
                rngG,
                true,
                null);

        double[] result1 = optimizer.optimize(
                new MaxEval(10000),
                new ObjectiveFunction(fER),
                GoalType.MINIMIZE,
                new PopulationSize((int) (4+3*Math.log(modelTransitionNum))),
                new Sigma(sigma),
                new InitialGuess(initGuess),
                new SimpleBounds(lowerBound, upperBound)).getPoint();

        StochasticLabelledPetriNetSimpleWeightsEditable
result = new StochasticLabelledPetriNetSimpleWeightsImpl();

        // Copy the net structure into the SLPN, attaching the optimized weights.
        TObjectIntMap oldPlace2place = new TObjectIntHashMap<>();
        for (Place oldPlace : net.getPlaces()) {
            int place = result.addPlace();
            oldPlace2place.put(oldPlace, place);
        }

        for (Place oldPlace : initial) {
            result.addPlaceToInitialMarking(oldPlace2place.get(oldPlace), initial.occurrences(oldPlace));
        }

        TObjectIntMap oldTransition2newTransition = new TObjectIntHashMap<>();
        for (Transition oldTransition : net.getTransitions()) {
            int newTransition;
            for(String tId:tmap.keySet()) {
                if(tId.equals(oldTransition.getLocalID().toString())) {
                    String tr = tmap.get(tId);
                    // NOTE(review): substring(length-1) keeps only the LAST character of
                    // "tN", so for nets with 10+ transitions ("t10", "t11", ...) the wrong
                    // weight index is used. Should strip the leading 't' instead
                    // (tr.substring(1)). Flagged, not fixed, in this documentation pass.
                    String tr2 = tr.substring(tr.length()-1);
                    Integer idx = Integer.valueOf(tr2);
                    double weight = result1[idx];

                    if (oldTransition.isInvisible()) {
                        newTransition = result.addTransition(weight);
                    } else {
                        newTransition = result.addTransition(oldTransition.getLabel(), weight);
                    }
                    oldTransition2newTransition.put(oldTransition, newTransition);
                    break;
                }
            }
        }
        for (Transition oldTransition : net.getTransitions()) {
            int newTransition = oldTransition2newTransition.get(oldTransition);
            for (PetrinetEdge edge : net.getInEdges(oldTransition)) {
                Place oldSource = (Place) edge.getSource();
                result.addPlaceTransitionArc(oldPlace2place.get(oldSource), newTransition);
            }
            for (PetrinetEdge edge : net.getOutEdges(oldTransition)) {
                Place oldTarget = (Place) edge.getTarget();
                result.addTransitionPlaceArc(newTransition, oldPlace2place.get(oldTarget));
            }
        }
        return result;
    }


    /**
     * Replaces every solver variable "x<digits>" in {@code input} with its mapped
     * expression from {@code varReplace}; unknown variables are left unchanged.
     */
    private static String replaceExpressions(String input,HashMap varReplace) {

//      String input_without_power = transformPowerExpression(input);
//      System.out.println("the var without exp: "+input_without_power);


        StringBuffer sb = new StringBuffer();
        Pattern pattern = Pattern.compile("x\\d+");
        Matcher matcher = pattern.matcher(input);

        while (matcher.find()) {
            String match = matcher.group();
            String replacement =
varReplace.getOrDefault(match, match);
            // NOTE(review): appendReplacement treats '$' and '\' in the replacement as
            // group references — if expressions can contain them, quote with
            // Matcher.quoteReplacement. Confirm against actual solver output.
            matcher.appendReplacement(sb, replacement);
        }

        matcher.appendTail(sb);
        return sb.toString();
    }

    /**
     * Expands "base^k" into the explicit product "base*base*...*base" (k factors).
     * Currently unused (its call site above is commented out).
     * NOTE(review): throws ArrayIndexOutOfBoundsException when the input contains
     * no '^', and returns "" for exponent 0 — harden before re-enabling the call.
     */
    private static String transformPowerExpression(String powerExpression) {
        // Split the expression into variable and exponent parts
        String[] parts = powerExpression.split("\\^");

        // Extract variable and exponent
        String variable = parts[0];
        int exponent = Integer.parseInt(parts[1]);

        // Create the transformed expression
        StringBuilder transformedExpression = new StringBuilder();
        for (int i = 0; i < exponent; i++) {
            transformedExpression.append(variable);
            if (i < exponent - 1) {
                transformedExpression.append("*");
            }
        }

        return transformedExpression.toString();
    }

    /**
     * For every state of the reachability graph, builds the symbolic firing
     * probability of each outgoing transition: "tI/(tI+tJ+...)" over the state's
     * outgoing edges, or "1" when the edge is the only one. Returns
     * {tProbMap, x2ProbMap} (the latter is filled nowhere in this method).
     * NOTE(review): generic parameters below were stripped by extraction tooling.
     */
    private Object[] getProbabilityFromPetriRG(
            ReachabilityGraph ts,
            HashMap tm) {

        HashMap> tProbMap = new HashMap>();
        Integer i = 0;
        HashMap x2ProbMap = new HashMap();


        // TODO Auto-generated method stub
        for(State state: ts.getNodes()) {
            // for each outgoing edges, get the probability
            String result = "(";
            for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) {
                result = result.concat(tm.get(t.getIdentifier())+"+");
            }

            // Drop the trailing '+' and close the parenthesis.
            result = result.substring(0, result.length()-1);
            result = result.concat(")");

            for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) {
                if(tProbMap.containsKey(tm.get(t.getIdentifier()))) {

                    if(ts.getOutEdges(state).size()==1) {//if transition t is the only edge
                        tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), "1");
                        i++;
                    }
                    else {
                        tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result);
                        i++;
                    }
                }
                else {
                    HashMap tempMap = new HashMap();
                    if(ts.getOutEdges(state).size()==1) {//if transition t is the only edge
                        tempMap.put(state.getLabel(), "1");
                    }
                    else {
                        tempMap.put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result);
                    }
                    i++;
                    tProbMap.put(tm.get(t.getIdentifier()), tempMap);
                }
            }
        }

        Object[] obj = new Object[2];
        obj[0] = tProbMap;
        obj[1] = x2ProbMap;
        return obj;
    }

    /**
     * Builds the state space of a single (linear) trace net. Returns the array
     * {ReachabilityGraph, ReachabilitySet, initial State, AcceptStateSet} produced
     * by buildAndConnectForTrace.
     */
    public Object[] calculateTSForTrace(PluginContext context, Petrinet net, Marking state, PetrinetSemantics semantics)
            throws ConnectionCannotBeObtained {
        semantics.initialize(net.getTransitions(), new Marking(state));
        return buildAndConnectForTrace(context, net, state, semantics, null);
    }


    /**
     * Trace-net variant of buildAndConnect: same boundedness checks and BFS
     * exploration, but accepting states are additionally flagged via
     * {@code state.setAccepting(true)} and the artefacts are returned instead of
     * only being connected. Returns {@code null} on unboundedness or BFS failure.
     */
    private Object[] buildAndConnectForTrace(PluginContext context, PetrinetGraph net, Marking initial,
            Semantics semantics, CoverabilityGraph coverabilityGraph)
            throws ConnectionCannotBeObtained {
        context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial);
        ReachabilityGraph ts = null;
        NetAnalysisInformation.BOUNDEDNESS info = null;

        try {
            BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection(
                    BoundednessInfoConnection.class, context, net, initial, semantics);
            info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION);
        } catch (Exception e) {
            // No connections available
        }

        if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) {
            // This net has been shows to be unbounded on this marking
            context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
                    Logger.MessageLevel.ERROR);
            context.getFutureResult(0).cancel(true);
            // unreachable statement, but safe.
            return null;
        }

        if (coverabilityGraph != null) {// && !bounded) {
            if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) {
                // This net has been shows to be unbounded on this marking
                context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
                        Logger.MessageLevel.ERROR);
                context.getFutureResult(0).cancel(true);
                // unreachable statement, but safe.
                return null;
            }
            // clone the graph and return
            // NOTE(review): unlike buildAndConnect, this clone copies only states,
            // not edges — confirm whether that is intentional for trace nets.
            Map mapping = new HashMap();

            ts = new ReachabilityGraph("StateSpace of " + net.getLabel());
            for (Object o : coverabilityGraph.getStates()) {
                CTMarking m = (CTMarking) o;
                Marking tsm = new Marking(m);
                ts.addState(tsm);
                mapping.put(m, tsm);
            }
        }

        StartStateSet startStates = new StartStateSet();
        startStates.add(initial);

        if (ts == null) {
            ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES);
        }
        if (ts == null) {
            // Problem with the reachability graph.
            context.getFutureResult(0).cancel(true);
            return null;
        }

        // Deadlock states are accepting; also mark them directly on the State objects.
        AcceptStateSet acceptingStates = new AcceptStateSet();
        for (State state : ts.getNodes()) {
            if (ts.getOutEdges(state).isEmpty()) {
                state.setAccepting(true);
                acceptingStates.add(state.getIdentifier());
            }
        }

        Marking[] markings = ts.getStates().toArray(new Marking[0]);
        ReachabilitySet rs = new ReachabilitySet(markings);

        context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set"));
        context.addConnection(new StateSpaceConnection(net, initial, ts, semantics));
        context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates));
        context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics));

        context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size()
                + " transitions.", Logger.MessageLevel.DEBUG);
        State initStateForTrace = new State(initial,ts);

        return new Object[]{ts, rs, initStateForTrace, acceptingStates};
    }

    /**
     * Breadth-first construction of the reachability graph starting at {@code state},
     * bounded by {@code max} states. Returns {@code null} (after cancelling the
     * plugin future) when the bound is hit with work still pending.
     */
    private ReachabilityGraph doBreadthFirst(PluginContext context, String label, Marking state,
            Semantics semantics, int max) {
        ReachabilityGraph ts = new ReachabilityGraph("StateSpace of " + label);
        ts.addState(state);
        Queue newStates = new LinkedList();
        newStates.add(state);
        do {
            newStates.addAll(extend(ts, newStates.poll(), semantics, context));
        } while
(!newStates.isEmpty() && (ts.getStates().size() < max));
        if (!newStates.isEmpty()) {
            // This net has been shows to be unbounded on this marking
            context.log("The behaviour of the given net is has over " + max + " states. Aborting...",
                    Logger.MessageLevel.ERROR);
            context.getFutureResult(0).cancel(true);
            return null;
        }
        return ts;

    }

    /**
     * Fires every enabled transition from {@code state}, adds the successor
     * markings and edges to {@code ts}, and returns the markings not seen before
     * (the next BFS frontier).
     */
    private Set extend(ReachabilityGraph ts,
            Marking state,
            Semantics semantics,
            PluginContext context) {
        Set newStates = new HashSet();
        semantics.setCurrentState(state);
        for (Transition t : semantics.getExecutableTransitions()) {
            // Reset to the source marking before firing each transition.
            semantics.setCurrentState(state);
            try {
                /*
                 * [HV] The local variable info is never read
                 * ExecutionInformation info =
                 */
                semantics.executeExecutableTransition(t);
                // context.log(info.toString(), MessageLevel.DEBUG);
            } catch (IllegalTransitionException e) {
                context.log(e);
                assert (false);
            }
            Marking newState = semantics.getCurrentState();

            if (ts.addState(newState)) {
                newStates.add(newState);
                int size = ts.getEdges().size();
                if (size % 1000 == 0) {
                    context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size()
                            + " transitions.", Logger.MessageLevel.DEBUG);
                }
            }
//          System.out.println("get id of trans: "+t.getLocalID()+" "+t.getLabel());

            ts.addTransition(state, newState, t.getLocalID(), t.getLabel(), t.isInvisible());

            semantics.setCurrentState(state);
        }
        return newStates;
    }

    /**
     * Entropic-relevance objective: for candidate weights {@code t}, evaluates each
     * solvable trace variant's probability expression and accumulates
     * -log(p_variant) weighted by the variant's relative frequency in the log.
     * Variants mapped to "0" (unsolvable/unreplayable) are skipped.
     * Reads/writes the static maps, so not thread-safe.
     */
    public static double getER(double[] t){
        for(int i=0;i < modelTransitionNum; i++) {
            strToDouble.put("t"+String.valueOf(i), t[i]);
        }

        // Print the variant frequencies once per optimization run.
        if (mapFlag) {
            for(String s:traceVariantMap.keySet()) {
                if (!isolatedVariantMap.get(s).equals("0")) {
                    System.out.println("Trace: "+s+", with frequency: "+(double)traceVariantMap.get(s)/logSize);
                }
            }
            mapFlag = false;
        }
        double val = 0;
        for(String s:traceVariantMap.keySet()) {
            if (!isolatedVariantMap.get(s).equals("0")) {
                val -=
Math.log(StrToExp.converStringToMathExp(isolatedVariantMap.get(s),strToDouble)) * (double)traceVariantMap.get(s)/logSize;
            }
        }
        return val;
    }

}
\ No newline at end of file
diff --git a/src/org/processmining/slpnminer/plugins/ErIterationDiscovery.java b/src/org/processmining/slpnminer/plugins/ErIterationDiscovery.java
new file mode 100644
index 0000000..e05ace6
--- /dev/null
+++ b/src/org/processmining/slpnminer/plugins/ErIterationDiscovery.java
@@ -0,0 +1,691 @@
+package org.processmining.slpnminer.plugins;
+
+import gnu.trove.map.TObjectIntMap;
+import gnu.trove.map.hash.TObjectIntHashMap;
+import org.apache.commons.math4.legacy.analysis.MultivariateFunction;
+import org.apache.commons.math4.legacy.optim.InitialGuess;
+import org.apache.commons.math4.legacy.optim.MaxEval;
+import org.apache.commons.math4.legacy.optim.SimpleBounds;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.ObjectiveFunction;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.Sigma;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.BOBYQAOptimizer;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.CMAESOptimizer;
+import org.apache.commons.rng.UniformRandomProvider;
+import org.apache.commons.rng.simple.RandomSource;
+import org.deckfour.xes.model.XAttribute;
+import org.deckfour.xes.model.XEvent;
+import org.deckfour.xes.model.XLog;
+import org.deckfour.xes.model.XTrace;
+
+import org.processmining.contexts.uitopia.annotations.UITopiaVariant;
+import org.processmining.framework.connections.ConnectionCannotBeObtained;
+import org.processmining.framework.plugin.PluginContext;
+import org.processmining.framework.plugin.annotations.Plugin;
+import org.processmining.framework.plugin.annotations.PluginVariant;
+import org.processmining.framework.plugin.events.Logger;
+import org.processmining.models.connections.petrinets.behavioral.*; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.analysis.NetAnalysisInformation; +import org.processmining.models.graphbased.directed.petrinet.analysis.ReachabilitySet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.petrinet.impl.PetrinetImpl; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.CTMarking; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetSemantics; +import org.processmining.models.semantics.petrinet.impl.PetrinetSemanticsFactory; +import org.processmining.slpnminer.connections.ReachabilityConnection; +import org.processmining.slpnminer.models.BoundednessAnalyzer; +import org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.CoverabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.CrossProductImpl; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.State; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsEditable; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsImpl; 
+import org.processmining.slpnminer.connections.DeadMarkingConnection; +import org.processmining.slpnminer.connections.StateSpaceConnection; +import org.processmining.slpnminer.helpers.EquationSystems; +import org.processmining.slpnminer.helpers.IsolateVariable; +import org.processmining.slpnminer.helpers.StrToExp; + +import java.text.ParseException; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +@Plugin(name = "Discover SLPN with Entropic Relevance", + returnLabels = { "stochastic labelled Petri net" }, + returnTypes = { StochasticLabelledPetriNetSimpleWeightsEditable.class}, + parameterLabels = { "net", "log"}) +public class ErIterationDiscovery { + + private static final int MAXSTATES = 25000; + + private static int modelTransitionNum; + + private static int logSize; + + private static HashMap strToDouble = new HashMap<>(); + + private static HashMap traceVariantMap; + + private static HashMap isolatedVariantMap = new HashMap(); + + private static Boolean mapFlag = true; + + private static Collection netTransitionCollection; + + private static HashMap tTsIdToPetriIdMap = new HashMap(); + + + + @UITopiaVariant(affiliation = UITopiaVariant.EHV, author = "Tian Li", email = "t.li@bpm.rwth-aachen.de", pack = "WeightEstimation") + @PluginVariant(requiredParameterLabels = {0, 1}) + public StochasticLabelledPetriNetSimpleWeightsEditable calculateTS(PluginContext context, XLog log, Petrinet net) throws ConnectionCannotBeObtained, ParseException { + + + Marking initialMarking = guessInitialMarking(net); + logSize = log.size(); + modelTransitionNum = net.getTransitions().size(); + netTransitionCollection = net.getTransitions(); + return calculateTS(context, log, net, initialMarking, 
PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class)); + } + + public static Marking guessInitialMarking(Petrinet net) { + Marking result = new Marking(); + for (Place p : net.getPlaces()) { + if (net.getInEdges(p).isEmpty()) { + result.add(p); + } + } + return result; + } + + public StochasticLabelledPetriNetSimpleWeightsEditable calculateTS(PluginContext context, XLog log, Petrinet net, Marking state, PetrinetSemantics semantics) + throws ConnectionCannotBeObtained, ParseException { + semantics.initialize(net.getTransitions(), new Marking(state)); + return buildAndConnect(context, log, net, state, semantics, null); + } + + private StochasticLabelledPetriNetSimpleWeightsEditable buildAndConnect(PluginContext context, + XLog log, + PetrinetGraph net, + Marking initial, + Semantics semantics, CoverabilityGraph coverabilityGraph) + throws ConnectionCannotBeObtained, ParseException { + context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial); + + ReachabilityGraph ts = null; + + + NetAnalysisInformation.BOUNDEDNESS info = null; + + try { + BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection( + BoundednessInfoConnection.class, context, net, initial, semantics); + info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION); + } catch (Exception e) { + // No connections available + } + + if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. 
+ return null; + } + + if (coverabilityGraph != null) {// && !bounded) { + if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. + return null; + } + // clone the graph and return + Map mapping = new HashMap(); + + ts = new ReachabilityGraph("StateSpace of " + net.getLabel()); + for (Object o : coverabilityGraph.getStates()) { + CTMarking m = (CTMarking) o; + Marking tsm = new Marking(m); + ts.addState(tsm); + mapping.put(m, tsm); + } + for (org.processmining.slpnminer.models.reachabilitygraph.Transition e : coverabilityGraph + .getEdges()) { + Marking source = mapping.get(e.getSource().getIdentifier()); + Marking target = mapping.get(e.getTarget().getIdentifier()); + ts.addTransition(source, target, e.getIdentifier(),e.getVisibility()); + } + } + + StartStateSet startStates = new StartStateSet(); + startStates.add(initial); + + if (ts == null) { + ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES); + } + if (ts == null) { + // Problem with the reachability graph. 
+ context.getFutureResult(0).cancel(true); + return null; + } + + AcceptStateSet acceptingStates = new AcceptStateSet(); + for (State state : ts.getNodes()) { + if (ts.getOutEdges(state).isEmpty()) { + acceptingStates.add(state.getIdentifier()); + } + } + + Marking[] markings = ts.getStates().toArray(new Marking[0]); + ReachabilitySet rs = new ReachabilitySet(markings); + + context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set")); + context.addConnection(new StateSpaceConnection(net, initial, ts, semantics)); + context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates)); + context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics)); + + context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size() + + " transitions.", Logger.MessageLevel.DEBUG); + +// get all the transitions + HashMap tm = new HashMap<>(); + HashMap tmap = new HashMap<>(); + + int transIdx = 0; + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getEdges()){ + +// System.out.println("iterate transition in ts: " + t.getIdentifier() + " " + t.getLabel()); + if(tmap.containsKey(t.getIdentifier().toString())) { + tm.put(t.getIdentifier(), tmap.get(t.getIdentifier().toString())); + } + else { + tm.put(t.getIdentifier(), "t" + transIdx); + tmap.put(t.getIdentifier().toString(), "t" + transIdx); + transIdx ++; + } + } +// get the probability of transition in the reachability graph of Petri net + Object[] obj2 = getProbabilityFromPetriRG(ts, tm); + HashMap> tProbMap = (HashMap>) obj2[0]; + HashMap x2ProbMap = (HashMap) obj2[1]; + + + Object[] traceObj; + traceVariantMap = new HashMap(); + + Integer j = 0; + for (XTrace trace : log) { + Petrinet traceNet = new PetrinetImpl("trace net"); + Marking initialMarking = new Marking(); + String traceStr = ""; + + if (!trace.isEmpty()) { +// get trace variant + XAttribute tempEv; + for(XEvent e: trace) { + 
tempEv = e.getAttributes().get("concept:name"); + traceStr = traceStr.concat(tempEv.toString()+" "); + } + + if(traceVariantMap.containsKey(traceStr)) { + Double count = traceVariantMap.get(traceStr) + 1; + traceVariantMap.put(traceStr,count); + continue; + } + else { + traceVariantMap.put(traceStr, 1.0); + } + Place currentPlace = traceNet.addPlace("p" + String.valueOf(0)); + initialMarking.add(currentPlace); + for (int i = 0; i < trace.size(); i++) { + Place nextPlace = traceNet.addPlace("p" + String.valueOf(i + 1)); + Transition t = traceNet.addTransition(String.valueOf(trace.get(i).getAttributes().get("concept:name"))); + t.setInvisible(false); + traceNet.addArc(currentPlace, t); + traceNet.addArc(t, nextPlace); + currentPlace = nextPlace; + } + } + + + traceObj = calculateTSForTrace(context, traceNet, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class)); + State initStateForPetri = new State(initial,ts); + CrossProductImpl CP = new CrossProductImpl(); + + + Object[] obj = CP.getCrossProduct( + ts, + initStateForPetri, + acceptingStates, + (ReachabilityGraph) traceObj[0], + (State) traceObj[2], + (AcceptStateSet) traceObj[3]); + + if(obj!=null) { + ReachabilityGraph another_ts = (ReachabilityGraph)obj[0]; + HashSet reachableState = another_ts.getReachableState(); + + Boolean noAcceptState = true; + for(State s: another_ts.getNodes()) { + if(s.isAccepting()) { + noAcceptState = false; + } + } + if(noAcceptState) { + isolatedVariantMap.put(traceStr,"0"); + j++; + continue; + } + + @SuppressWarnings("unchecked") + HashMap combiToPetri = (HashMap)obj[1]; + + EquationSystems eq = new EquationSystems(); + String assis1 = "Solve"; + Object[] resultObj = eq.getEqStr(another_ts, tm, tProbMap, combiToPetri, reachableState); + String eqSys = (String) resultObj[0]; + @SuppressWarnings("unchecked") + HashMap replTransProb = (HashMap)resultObj[1]; + + try { + ExecutorService executorService = Executors.newSingleThreadExecutor(); + String 
isolatedVar = null; + Future future = executorService.submit(() -> IsolateVariable.getIsolatedVar(assis1+eqSys)); + + try { + isolatedVar = future.get(30, TimeUnit.SECONDS); + } catch (TimeoutException e) { + System.out.println("Timeout computing the probability"); + isolatedVariantMap.put(traceStr,"0"); + } catch(Exception e){ + + }finally { + executorService.shutdown(); + } + + if (isolatedVar==null) { + j++; + isolatedVariantMap.put(traceStr,"0"); + continue; + } +// isolatedVar = isolatedVar.replaceAll("\\s+",""); + String transformedIsolatedVar = replaceExpressions(isolatedVar, replTransProb); +// System.out.println("the isolatated var before: "+isolatedVar); +// System.out.println("the "+String.valueOf(j+1)+"-th trace: " + traceStr); +// System.out.println("the isolatated var after: "+transformedIsolatedVar); + isolatedVariantMap.put(traceStr,transformedIsolatedVar); + } + catch(Exception e){ + + } + } + else { + isolatedVariantMap.put(traceStr,"0"); + } + j++; + } + + mapFlag = true; + MultivariateFunction fER = new MultivariateFunction() { + public double value(double[] x) { + return getER(x); + } + }; + + double[] lowerBound = new double[modelTransitionNum]; + double[] upperBound = new double[modelTransitionNum]; + double[] initGuess = new double[modelTransitionNum]; + double[] sigma = new double[modelTransitionNum]; + Arrays.fill(lowerBound, 0.0001); + Arrays.fill(upperBound, 1.0000); + Arrays.fill(initGuess, 1); + Arrays.fill(sigma, 0.1); + UniformRandomProvider rngG = RandomSource.MT_64.create(); + CMAESOptimizer optimizer = new CMAESOptimizer( + 1000000, + 0, + true, + modelTransitionNum, + 100, + rngG, + true, + null); + + double[] result1 = optimizer.optimize( + new MaxEval(10000), + new ObjectiveFunction(fER), + GoalType.MINIMIZE, + new PopulationSize((int) (4+3*Math.log(modelTransitionNum))), + new Sigma(sigma), + new InitialGuess(initGuess), + new SimpleBounds(lowerBound, upperBound)).getPoint(); + + StochasticLabelledPetriNetSimpleWeightsEditable 
result = new StochasticLabelledPetriNetSimpleWeightsImpl(); + + TObjectIntMap oldPlace2place = new TObjectIntHashMap<>(); + for (Place oldPlace : net.getPlaces()) { + int place = result.addPlace(); + oldPlace2place.put(oldPlace, place); + } + + for (Place oldPlace : initial) { + result.addPlaceToInitialMarking(oldPlace2place.get(oldPlace), initial.occurrences(oldPlace)); + } + + TObjectIntMap oldTransition2newTransition = new TObjectIntHashMap<>(); + for (Transition oldTransition : net.getTransitions()) { + int newTransition; + for(String tId:tmap.keySet()) { + if(tId.equals(oldTransition.getLocalID().toString())) { + String tr = tmap.get(tId); + String tr2 = tr.substring(tr.length()-1); + Integer idx = Integer.valueOf(tr2); + double weight = result1[idx]; + + if (oldTransition.isInvisible()) { + newTransition = result.addTransition(weight); + } else { + newTransition = result.addTransition(oldTransition.getLabel(), weight); + } + oldTransition2newTransition.put(oldTransition, newTransition); + break; + } + } + } + for (Transition oldTransition : net.getTransitions()) { + int newTransition = oldTransition2newTransition.get(oldTransition); + for (PetrinetEdge edge : net.getInEdges(oldTransition)) { + Place oldSource = (Place) edge.getSource(); + result.addPlaceTransitionArc(oldPlace2place.get(oldSource), newTransition); + } + for (PetrinetEdge edge : net.getOutEdges(oldTransition)) { + Place oldTarget = (Place) edge.getTarget(); + result.addTransitionPlaceArc(newTransition, oldPlace2place.get(oldTarget)); + } + } + return result; + } + + + private static String replaceExpressions(String input,HashMap varReplace) { + +// String input_without_power = transformPowerExpression(input); +// System.out.println("the var without exp: "+input_without_power); + + + StringBuffer sb = new StringBuffer(); + Pattern pattern = Pattern.compile("x\\d+"); + Matcher matcher = pattern.matcher(input); + + while (matcher.find()) { + String match = matcher.group(); + String replacement = 
varReplace.getOrDefault(match, match); + matcher.appendReplacement(sb, replacement); + } + + matcher.appendTail(sb); + return sb.toString(); + } + + private static String transformPowerExpression(String powerExpression) { + // Split the expression into variable and exponent parts + String[] parts = powerExpression.split("\\^"); + + // Extract variable and exponent + String variable = parts[0]; + int exponent = Integer.parseInt(parts[1]); + + // Create the transformed expression + StringBuilder transformedExpression = new StringBuilder(); + for (int i = 0; i < exponent; i++) { + transformedExpression.append(variable); + if (i < exponent - 1) { + transformedExpression.append("*"); + } + } + + return transformedExpression.toString(); + } + + private Object[] getProbabilityFromPetriRG( + ReachabilityGraph ts, + HashMap tm) { + + HashMap> tProbMap = new HashMap>(); + Integer i = 0; + HashMap x2ProbMap = new HashMap(); + + + // TODO Auto-generated method stub + for(State state: ts.getNodes()) { + // for each outgoing edges, get the probability + String result = "("; + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + result = result.concat(tm.get(t.getIdentifier())+"+"); + } + + result = result.substring(0, result.length()-1); + result = result.concat(")"); + + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + if(tProbMap.containsKey(tm.get(t.getIdentifier()))) { + + if(ts.getOutEdges(state).size()==1) {//if transition t is the only edge + tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), "1"); + i++; + } + else { + tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result); + i++; + } + } + else { + HashMap tempMap = new HashMap(); + if(ts.getOutEdges(state).size()==1) {//if transition t is the only edge + tempMap.put(state.getLabel(), "1"); + } + else { + tempMap.put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result); 
+ } + i++; + tProbMap.put(tm.get(t.getIdentifier()), tempMap); + } + } + } + + Object[] obj = new Object[2]; + obj[0] = tProbMap; + obj[1] = x2ProbMap; + return obj; + } + + public Object[] calculateTSForTrace(PluginContext context, Petrinet net, Marking state, PetrinetSemantics semantics) + throws ConnectionCannotBeObtained { + semantics.initialize(net.getTransitions(), new Marking(state)); + return buildAndConnectForTrace(context, net, state, semantics, null); + } + + + private Object[] buildAndConnectForTrace(PluginContext context, PetrinetGraph net, Marking initial, + Semantics semantics, CoverabilityGraph coverabilityGraph) + throws ConnectionCannotBeObtained { + context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial); + ReachabilityGraph ts = null; + NetAnalysisInformation.BOUNDEDNESS info = null; + + try { + BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection( + BoundednessInfoConnection.class, context, net, initial, semantics); + info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION); + } catch (Exception e) { + // No connections available + } + + if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. + return null; + } + + if (coverabilityGraph != null) {// && !bounded) { + if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) { + // This net has been shows to be unbounded on this marking + context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + // unreachable statement, but safe. 
+ return null; + } + // clone the graph and return + Map mapping = new HashMap(); + + ts = new ReachabilityGraph("StateSpace of " + net.getLabel()); + for (Object o : coverabilityGraph.getStates()) { + CTMarking m = (CTMarking) o; + Marking tsm = new Marking(m); + ts.addState(tsm); + mapping.put(m, tsm); + } + } + + StartStateSet startStates = new StartStateSet(); + startStates.add(initial); + + if (ts == null) { + ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES); + } + if (ts == null) { + // Problem with the reachability graph. + context.getFutureResult(0).cancel(true); + return null; + } + + AcceptStateSet acceptingStates = new AcceptStateSet(); + for (State state : ts.getNodes()) { + if (ts.getOutEdges(state).isEmpty()) { + state.setAccepting(true); + acceptingStates.add(state.getIdentifier()); + } + } + + Marking[] markings = ts.getStates().toArray(new Marking[0]); + ReachabilitySet rs = new ReachabilitySet(markings); + + context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set")); + context.addConnection(new StateSpaceConnection(net, initial, ts, semantics)); + context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates)); + context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics)); + + context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size() + + " transitions.", Logger.MessageLevel.DEBUG); + State initStateForTrace = new State(initial,ts); + + return new Object[]{ts, rs, initStateForTrace, acceptingStates}; + } + + private ReachabilityGraph doBreadthFirst(PluginContext context, String label, Marking state, + Semantics semantics, int max) { + ReachabilityGraph ts = new ReachabilityGraph("StateSpace of " + label); + ts.addState(state); + Queue newStates = new LinkedList(); + newStates.add(state); + do { + newStates.addAll(extend(ts, newStates.poll(), semantics, context)); + } while 
(!newStates.isEmpty() && (ts.getStates().size() < max)); + if (!newStates.isEmpty()) { + // This net has been shows to be unbounded on this marking + context.log("The behaviour of the given net is has over " + max + " states. Aborting...", + Logger.MessageLevel.ERROR); + context.getFutureResult(0).cancel(true); + return null; + } + return ts; + + } + + private Set extend(ReachabilityGraph ts, + Marking state, + Semantics semantics, + PluginContext context) { + Set newStates = new HashSet(); + semantics.setCurrentState(state); + for (Transition t : semantics.getExecutableTransitions()) { + semantics.setCurrentState(state); + try { + /* + * [HV] The local variable info is never read + * ExecutionInformation info = + */ + semantics.executeExecutableTransition(t); + // context.log(info.toString(), MessageLevel.DEBUG); + } catch (IllegalTransitionException e) { + context.log(e); + assert (false); + } + Marking newState = semantics.getCurrentState(); + + if (ts.addState(newState)) { + newStates.add(newState); + int size = ts.getEdges().size(); + if (size % 1000 == 0) { + context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size() + + " transitions.", Logger.MessageLevel.DEBUG); + } + } +// System.out.println("get id of trans: "+t.getLocalID()+" "+t.getLabel()); + + ts.addTransition(state, newState, t.getLocalID(), t.getLabel(), t.isInvisible()); + + semantics.setCurrentState(state); + } + return newStates; + } + + public static double getER(double[] t){ + for(int i=0;i < modelTransitionNum; i++) { + strToDouble.put("t"+String.valueOf(i), t[i]); + } + + if (mapFlag) { + for(String s:traceVariantMap.keySet()) { + if (!isolatedVariantMap.get(s).equals("0")) { + System.out.println("Trace: "+s+", with frequency: "+(double)traceVariantMap.get(s)/logSize); + } + } + mapFlag = false; + } + double val = 0; + for(String s:traceVariantMap.keySet()) { + if (!isolatedVariantMap.get(s).equals("0")) { + val -= 
Math.log(StrToExp.converStringToMathExp(isolatedVariantMap.get(s),strToDouble)) * (double)traceVariantMap.get(s)/logSize; + } + } + return val; + } + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/plugins/ReachabilityGraphVisualiserPlugin.java b/src/org/processmining/slpnminer/plugins/ReachabilityGraphVisualiserPlugin.java new file mode 100644 index 0000000..f12d61b --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/ReachabilityGraphVisualiserPlugin.java @@ -0,0 +1,50 @@ +//package org.processmining.slpnminer.plugins; +// +//import javax.swing.JComponent; +//import org.processmining.acceptingpetrinet.models.AcceptingPetriNet; +//import org.processmining.acceptingpetrinet.plugins.VisualizeAcceptingPetriNetPlugin; +//import org.processmining.contexts.uitopia.annotations.Visualizer; +//import org.processmining.framework.plugin.PluginContext; +//import org.processmining.framework.plugin.annotations.Plugin; +//import org.processmining.framework.plugin.annotations.PluginLevel; +//import org.processmining.framework.plugin.annotations.PluginVariant; +//import org.processmining.slpnminer.algorithms.GraphVisualiserAlgorithm; +//import org.processmining.slpnminer.models.MinedReachabilityGraph; +//import org.processmining.slpnminer.models.ReachabilityGraph; +//import org.processmining.slpnminer.parameters.GraphVisualiserParameters; +//import org.processmining.models.graphbased.ViewSpecificAttributeMap; +//import org.processmining.models.graphbased.directed.petrinet.InhibitorNet; +//import org.processmining.models.graphbased.directed.petrinet.Petrinet; +//import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +//import org.processmining.models.graphbased.directed.petrinet.ResetInhibitorNet; +//import org.processmining.models.graphbased.directed.petrinet.ResetNet; +//import org.processmining.models.graphbased.directed.petrinet.elements.Place; +//import 
org.processmining.models.graphbased.directed.transitionsystem.TransitionSystem; +//import org.processmining.models.jgraph.ProMJGraph; +//import org.processmining.models.jgraph.visualization.ProMJGraphPanel; +//import org.processmining.plugins.petrinet.PetriNetVisualization; +//import org.processmining.plugins.transitionsystem.MinedTSVisualization; +// +//public class ReachabilityGraphVisualiserPlugin extends GraphVisualiserAlgorithm { +// +// @Plugin(name = "Visualize Transition System (Dot)",returnLabels = { +// "Visualized Transition System" }, returnTypes = { JComponent.class }, parameterLabels = { +// "Causal Activity Matrix" }, userAccessible = true) +// @Visualizer +// @PluginVariant(requiredParameterLabels = { 0 }) +// public JComponent runUI(PluginContext context, ReachabilityGraph rg) { +// /* +// * Get a hold on the view specific attributes. +// */ +// +// System.out.println("use the vis for ts"); +// ProMJGraphPanel panel = (ProMJGraphPanel) (new MinedReachabilityGraph()).visualize(context, rg); +// ProMJGraph jGraph = panel.getGraph(); +// ViewSpecificAttributeMap map = jGraph.getViewSpecificAttributes(); +// /* +// * Got it. Now create the dot panel. 
+// */ +// return apply(context, rg, map); +// } +// +//} diff --git a/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetExportPlugin.java b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetExportPlugin.java new file mode 100644 index 0000000..5b0e19b --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetExportPlugin.java @@ -0,0 +1,65 @@ +package org.processmining.slpnminer.plugins; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; + +import org.apache.commons.text.StringEscapeUtils; +import org.processmining.contexts.uitopia.UIPluginContext; +import org.processmining.contexts.uitopia.annotations.UIExportPlugin; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeights; + +@Plugin(name = "Stochastic labelled Petri net exporter", returnLabels = {}, returnTypes = {}, parameterLabels = { + "Inductive visual Miner alignment", "File" }, userAccessible = true) +@UIExportPlugin(description = "Stochastic labelled Petri net", extension = "slpn") +public class StochasticLabelledPetriNetExportPlugin { + @PluginVariant(variantLabel = "Dfg export (Directly follows graph)", requiredParameterLabels = { 0, 1 }) + public void exportDefault(UIPluginContext context, StochasticLabelledPetriNetSimpleWeights net, File file) throws IOException { + export(net, file); + } + + public static void export(StochasticLabelledPetriNetSimpleWeights net, File file) throws IOException { + PrintWriter w = null; + try { + w = new PrintWriter(file); + w.println("# number of places"); + w.println(net.getNumberOfPlaces()); + + w.println("# initial marking"); + for (int place = 0; place < net.getNumberOfPlaces(); place++) { + w.println(net.isInInitialMarking(place)); + } + + w.println("# number of transitions"); + 
w.println(net.getNumberOfTransitions()); + for (int transition = 0; transition < net.getNumberOfTransitions(); transition++) { + w.println("# transition " + transition); + if (net.isTransitionSilent(transition)) { + w.println("silent"); + } else { + w.println("label " + StringEscapeUtils.escapeJava(net.getTransitionLabel(transition))); + } + w.println("# weight "); + w.println(net.getTransitionWeight(transition)); + + w.println("# number of input places"); + w.println(net.getInputPlaces(transition).length); + for (int place : net.getInputPlaces(transition)) { + w.println(place); + } + + w.println("# number of output places"); + w.println(net.getOutputPlaces(transition).length); + for (int place : net.getOutputPlaces(transition)) { + w.println(place); + } + } + } finally { + if (w != null) { + w.close(); + } + } + } +} diff --git a/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetImportPlugin.java b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetImportPlugin.java new file mode 100644 index 0000000..8a2c822 --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetImportPlugin.java @@ -0,0 +1,83 @@ +package org.processmining.slpnminer.plugins; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; + +import org.processmining.contexts.uitopia.annotations.UIImportPlugin; +import org.processmining.framework.abstractplugins.AbstractImportPlugin; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeights; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsImpl; + +@Plugin(name = "Stochastic labelled Petri net", parameterLabels = { "Filename" }, returnLabels = { + "Stochastic labelled Petri net" }, returnTypes = { 
StochasticLabelledPetriNetSimpleWeights.class }) +@UIImportPlugin(description = "Stochastic labelled Petri net files", extensions = { "slpn" }) +public class StochasticLabelledPetriNetImportPlugin extends AbstractImportPlugin { + public StochasticLabelledPetriNetSimpleWeights importFromStream(PluginContext context, InputStream input, String filename, + long fileSizeInBytes) throws Exception { + return read(input); + } + + public static StochasticLabelledPetriNetSimpleWeightsImpl read(InputStream input) throws NumberFormatException, IOException { + + StochasticLabelledPetriNetSimpleWeightsImpl result = new StochasticLabelledPetriNetSimpleWeightsImpl(); + + BufferedReader r = new BufferedReader(new InputStreamReader(input)); + + int numberOfPlaces = Integer.parseInt(getNextLine(r)); + for (int place = 0; place < numberOfPlaces; place++) { + result.addPlace(); + + int inInitialMarking = Integer.parseInt(getNextLine(r)); + if (inInitialMarking > 0) { + result.addPlaceToInitialMarking(place, inInitialMarking); + } + } + + int numberOfTransitions = Integer.parseInt(getNextLine(r)); + for (int transition = 0; transition < numberOfTransitions; transition++) { + String line = getNextLine(r); + double weight = Double.valueOf(getNextLine(r)); + if (line.startsWith("silent")) { + result.addTransition(weight); + } else if (line.startsWith("label ")) { + result.addTransition(line.substring(6), weight); + } else { + throw new RuntimeException("invalid transition"); + } + + //incoming places + { + int numberOfIncomingPlaces = Integer.parseInt(getNextLine(r)); + for (int p = 0; p < numberOfIncomingPlaces; p++) { + int place = Integer.parseInt(getNextLine(r)); + result.addPlaceTransitionArc(place, transition); + } + } + + //outgoing places + { + int numberOfOutgoingPlaces = Integer.parseInt(getNextLine(r)); + for (int p = 0; p < numberOfOutgoingPlaces; p++) { + int place = Integer.parseInt(getNextLine(r)); + result.addTransitionPlaceArc(transition, place); + } + } + } + + r.close(); 
+ + return result; + } + + public static String getNextLine(BufferedReader r) throws IOException { + String line = r.readLine(); + while (line != null && line.startsWith("#")) { + line = r.readLine(); + } + return line; + } +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetSimpleWeightsVisualisationPlugin.java b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetSimpleWeightsVisualisationPlugin.java new file mode 100644 index 0000000..e7a2b29 --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetSimpleWeightsVisualisationPlugin.java @@ -0,0 +1,37 @@ +package org.processmining.slpnminer.plugins; + +import javax.swing.JComponent; + +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.contexts.uitopia.annotations.Visualizer; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.ProMCanceller; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginLevel; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.plugins.InductiveMiner.plugins.dialogs.IMMiningDialog; +import org.processmining.plugins.graphviz.dot.DotNode; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeights; + +public class StochasticLabelledPetriNetSimpleWeightsVisualisationPlugin + extends StochasticLabelledPetriNetVisualisationPlugin { + @Plugin(name = "Stochastic labelled Petri net (simple weights) visualisation", returnLabels = { + "Dot visualization" }, returnTypes = { JComponent.class }, parameterLabels = { + "stochastic labelled Petri net", "canceller" }, userAccessible = true) + @Visualizer + @UITopiaVariant(affiliation = IMMiningDialog.affiliation, author = IMMiningDialog.author, email = IMMiningDialog.email) + @PluginVariant(variantLabel = "Stochastic 
labelled Petri net visualisation", requiredParameterLabels = { 0, 1 }) + public JComponent visualise(final PluginContext context, StochasticLabelledPetriNetSimpleWeights net, + ProMCanceller canceller) { + return visualise(net); + } + + public void decoratePlace(StochasticLabelledPetriNetSimpleWeights net, int place, DotNode dotNode) { + + } + + public void decorateTransition(StochasticLabelledPetriNetSimpleWeights net, int transition, DotNode dotNode) { + dotNode.setOption("xlabel", net.getTransitionWeight(transition) + ""); + } + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetVisualisationPlugin.java b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetVisualisationPlugin.java new file mode 100644 index 0000000..24aca44 --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/StochasticLabelledPetriNetVisualisationPlugin.java @@ -0,0 +1,64 @@ +package org.processmining.slpnminer.plugins; + +import org.processmining.plugins.graphviz.dot.Dot; +import org.processmining.plugins.graphviz.dot.DotNode; +import org.processmining.plugins.graphviz.visualisation.DotPanel; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNet; + +import gnu.trove.map.TIntObjectMap; +import gnu.trove.map.hash.TIntObjectHashMap; + +public abstract class StochasticLabelledPetriNetVisualisationPlugin { + + public DotPanel visualise(N net) { + Dot dot = new Dot(); + + dot.setOption("forcelabels", "true"); + + TIntObjectMap place2dotNode = new TIntObjectHashMap<>(10, 0.5f, -1); + + for (int place = 0; place < net.getNumberOfPlaces(); place++) { + DotNode dotNode = dot.addNode(""); + dotNode.setOption("shape", "circle"); + place2dotNode.put(place, dotNode); + + if (net.isInInitialMarking(place) > 0) { + dotNode.setOption("style", "filled"); + dotNode.setOption("fillcolor", "#80ff00"); + } + + decoratePlace(net, place, dotNode); + } + + for (int transition = 0; transition < 
net.getNumberOfTransitions(); transition++) { + DotNode dotNode; + + if (net.isTransitionSilent(transition)) { + dotNode = dot.addNode("" + transition); + dotNode.setOption("style", "filled"); + dotNode.setOption("fillcolor", "#8EBAE5"); + } else { + dotNode = dot.addNode(net.getTransitionLabel(transition)); + } + + dotNode.setOption("shape", "box"); + + decorateTransition(net, transition, dotNode); + + for (int place : net.getOutputPlaces(transition)) { + dot.addEdge(dotNode, place2dotNode.get(place)); + } + + for (int place : net.getInputPlaces(transition)) { + dot.addEdge(place2dotNode.get(place), dotNode); + } + } + + return new DotPanel(dot); + } + + public abstract void decoratePlace(N net, int place, DotNode dotNode); + + public abstract void decorateTransition(N net, int transition, DotNode dotNode); + +} \ No newline at end of file diff --git a/src/org/processmining/slpnminer/plugins/UEMSCDiscovery.java b/src/org/processmining/slpnminer/plugins/UEMSCDiscovery.java new file mode 100644 index 0000000..56a269d --- /dev/null +++ b/src/org/processmining/slpnminer/plugins/UEMSCDiscovery.java @@ -0,0 +1,696 @@ +package org.processmining.slpnminer.plugins; + +import gnu.trove.map.TObjectIntMap; +import gnu.trove.map.hash.TObjectIntHashMap; +import org.apache.commons.math4.legacy.analysis.MultivariateFunction; +import org.apache.commons.math4.legacy.optim.InitialGuess; +import org.apache.commons.math4.legacy.optim.MaxEval; +import org.apache.commons.math4.legacy.optim.SimpleBounds; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.ObjectiveFunction; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.Sigma; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.BOBYQAOptimizer; +import org.apache.commons.math4.legacy.optim.nonlinear.scalar.noderiv.CMAESOptimizer; +import 
org.apache.commons.rng.UniformRandomProvider; +import org.apache.commons.rng.simple.RandomSource; +import org.deckfour.xes.model.XAttribute; +import org.deckfour.xes.model.XEvent; +import org.deckfour.xes.model.XLog; +import org.deckfour.xes.model.XTrace; + +import org.processmining.contexts.uitopia.annotations.UITopiaVariant; +import org.processmining.framework.connections.ConnectionCannotBeObtained; +import org.processmining.framework.plugin.PluginContext; +import org.processmining.framework.plugin.annotations.Plugin; +import org.processmining.framework.plugin.annotations.PluginVariant; +import org.processmining.framework.plugin.events.Logger; +import org.processmining.models.connections.petrinets.behavioral.*; +import org.processmining.models.graphbased.directed.petrinet.Petrinet; +import org.processmining.models.graphbased.directed.petrinet.PetrinetEdge; +import org.processmining.models.graphbased.directed.petrinet.PetrinetGraph; +import org.processmining.models.graphbased.directed.petrinet.PetrinetNode; +import org.processmining.models.graphbased.directed.petrinet.analysis.NetAnalysisInformation; +import org.processmining.models.graphbased.directed.petrinet.analysis.ReachabilitySet; +import org.processmining.models.graphbased.directed.petrinet.elements.Place; +import org.processmining.models.graphbased.directed.petrinet.elements.Transition; +import org.processmining.models.graphbased.directed.petrinet.impl.PetrinetImpl; +import org.processmining.models.semantics.IllegalTransitionException; +import org.processmining.models.semantics.Semantics; +import org.processmining.models.semantics.petrinet.CTMarking; +import org.processmining.models.semantics.petrinet.Marking; +import org.processmining.models.semantics.petrinet.PetrinetSemantics; +import org.processmining.models.semantics.petrinet.impl.PetrinetSemanticsFactory; +import org.processmining.slpnminer.connections.ReachabilityConnection; +import org.processmining.slpnminer.models.BoundednessAnalyzer; +import 
org.processmining.slpnminer.models.reachabilitygraph.AcceptStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.CoverabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.CrossProductImpl; +import org.processmining.slpnminer.models.reachabilitygraph.ReachabilityGraph; +import org.processmining.slpnminer.models.reachabilitygraph.StartStateSet; +import org.processmining.slpnminer.models.reachabilitygraph.State; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsEditable; +import org.processmining.stochasticlabelledpetrinets.StochasticLabelledPetriNetSimpleWeightsImpl; +import org.processmining.slpnminer.connections.DeadMarkingConnection; +import org.processmining.slpnminer.connections.StateSpaceConnection; +import org.processmining.slpnminer.helpers.EquationSystems; +import org.processmining.slpnminer.helpers.IsolateVariable; +import org.processmining.slpnminer.helpers.StrToExp; + +import java.text.ParseException; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +@Plugin(name = "Discover SLPN with unit-EMSC", + returnLabels = { "stochastic labelled Petri net" }, + returnTypes = { StochasticLabelledPetriNetSimpleWeightsEditable.class}, + parameterLabels = { "net", "log"}) +public class UEMSCDiscovery { + + private static final int MAXSTATES = 25000; + + private static int modelTransitionNum; + + private static int logSize; + + private static HashMap strToDouble = new HashMap<>(); + + private static HashMap traceVariantMap; + + private static HashMap isolatedVariantMap = new HashMap(); + + private static Boolean mapFlag = true; + + private static Collection netTransitionCollection; + + private static HashMap tTsIdToPetriIdMap = new HashMap(); 
+ + + + @UITopiaVariant(affiliation = UITopiaVariant.EHV, author = "Tian Li", email = "t.li@bpm.rwth-aachen.de", pack = "WeightEstimation") + @PluginVariant(requiredParameterLabels = {0, 1}) + public StochasticLabelledPetriNetSimpleWeightsEditable calculateTS(PluginContext context, XLog log, Petrinet net) throws ConnectionCannotBeObtained, ParseException { + + + Marking initialMarking = guessInitialMarking(net); + logSize = log.size(); + modelTransitionNum = net.getTransitions().size(); + netTransitionCollection = net.getTransitions(); + return calculateTS(context, log, net, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class)); + } + + public static Marking guessInitialMarking(Petrinet net) { + Marking result = new Marking(); + for (Place p : net.getPlaces()) { + if (net.getInEdges(p).isEmpty()) { + result.add(p); + } + } + return result; + } + + public StochasticLabelledPetriNetSimpleWeightsEditable calculateTS(PluginContext context, XLog log, Petrinet net, Marking state, PetrinetSemantics semantics) + throws ConnectionCannotBeObtained, ParseException { + semantics.initialize(net.getTransitions(), new Marking(state)); + return buildAndConnect(context, log, net, state, semantics, null); + } + + private StochasticLabelledPetriNetSimpleWeightsEditable buildAndConnect(PluginContext context, + XLog log, + PetrinetGraph net, + Marking initial, + Semantics semantics, CoverabilityGraph coverabilityGraph) + throws ConnectionCannotBeObtained, ParseException { + context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial); + + ReachabilityGraph ts = null; + + + NetAnalysisInformation.BOUNDEDNESS info = null; + + try { + BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection( + BoundednessInfoConnection.class, context, net, initial, semantics); + info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION); + } catch (Exception e) { + 
			// No connections available
		}

		// A boundedness value of FALSE means the net is known to be unbounded
		// on this marking: abort (the cancel terminates the plugin future).
		if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) {
			// This net has been shown to be unbounded on this marking
			context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
					Logger.MessageLevel.ERROR);
			context.getFutureResult(0).cancel(true);
			// unreachable statement, but safe.
			return null;
		}

		if (coverabilityGraph != null) {// && !bounded) {
			if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) {
				// This net has been shown to be unbounded on this marking
				context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
						Logger.MessageLevel.ERROR);
				context.getFutureResult(0).cancel(true);
				// unreachable statement, but safe.
				return null;
			}
			// clone the graph and return
			Map mapping = new HashMap();

			ts = new ReachabilityGraph("StateSpace of " + net.getLabel());
			for (Object o : coverabilityGraph.getStates()) {
				CTMarking m = (CTMarking) o;
				Marking tsm = new Marking(m);
				ts.addState(tsm);
				mapping.put(m, tsm);
			}
			for (org.processmining.slpnminer.models.reachabilitygraph.Transition e : coverabilityGraph
					.getEdges()) {
				// NOTE(review): mapping is keyed by the CTMarking states added
				// above but looked up via getSource()/getTarget().getIdentifier();
				// verify these identifiers are those same CTMarking objects.
				Marking source = mapping.get(e.getSource().getIdentifier());
				Marking target = mapping.get(e.getTarget().getIdentifier());
				ts.addTransition(source, target, e.getIdentifier(),e.getVisibility());
			}
		}

		StartStateSet startStates = new StartStateSet();
		startStates.add(initial);

		// No coverability graph given: explore the state space breadth-first,
		// bounded by MAXSTATES.
		if (ts == null) {
			ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES);
		}
		if (ts == null) {
			// Problem with the reachability graph.
			context.getFutureResult(0).cancel(true);
			return null;
		}

		// States without outgoing edges (deadlocks) are the accepting states.
		AcceptStateSet acceptingStates = new AcceptStateSet();
		for (State state : ts.getNodes()) {
			if (ts.getOutEdges(state).isEmpty()) {
				acceptingStates.add(state.getIdentifier());
			}
		}

		Marking[] markings = ts.getStates().toArray(new Marking[0]);
		ReachabilitySet rs = new ReachabilitySet(markings);

		// Publish the usual ProM connections for the computed state space.
		context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set"));
		context.addConnection(new StateSpaceConnection(net, initial, ts, semantics));
		context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates));
		context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics));

		context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size()
				+ " transitions.", Logger.MessageLevel.DEBUG);

//		get all the transitions
		// Assign each distinct Petri-net transition a symbolic weight variable
		// "tN". tmap is keyed by the identifier's string form so that several
		// reachability-graph edges fired by the same net transition share one
		// variable; tm maps each identifier object to that variable name.
		HashMap tm = new HashMap<>();
		HashMap tmap = new HashMap<>();

		int transIdx = 0;
		for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getEdges()){

//			System.out.println("iterate transition in ts: " + t.getIdentifier() + " " + t.getLabel());
			if(tmap.containsKey(t.getIdentifier().toString())) {
				tm.put(t.getIdentifier(), tmap.get(t.getIdentifier().toString()));
			}
			else {
				tm.put(t.getIdentifier(), "t" + transIdx);
				tmap.put(t.getIdentifier().toString(), "t" + transIdx);
				transIdx ++;
			}
		}
//		get the probability of transition in the reachability graph of Petri net
		Object[] obj2 = getProbabilityFromPetriRG(ts, tm);
		HashMap> tProbMap = (HashMap>) obj2[0];
		HashMap x2ProbMap = (HashMap) obj2[1];


		Object[] traceObj;
		traceVariantMap = new HashMap();

		System.out.println("1. Start log iteration");

		Integer j = 0;
		for (XTrace trace : log) {
			// Build a strictly sequential "trace net" whose only run is
			// exactly this trace: p0 -> e1 -> p1 -> e2 -> ... -> pn.
			Petrinet traceNet = new PetrinetImpl("trace net");
			Marking initialMarking = new Marking();
			String traceStr = "";

			if (!trace.isEmpty()) {
//				get trace variant
				XAttribute tempEv;
				for(XEvent e: trace) {
					tempEv = e.getAttributes().get("concept:name");
					traceStr = traceStr.concat(tempEv.toString()+" ");
				}

				// Already-seen variant: only bump its frequency; the
				// probability expression was computed on first sight.
				if(traceVariantMap.containsKey(traceStr)) {
					Double count = traceVariantMap.get(traceStr) + 1;
					traceVariantMap.put(traceStr,count);
					continue;
				}
				else {
					traceVariantMap.put(traceStr, 1.0);
				}
				Place currentPlace = traceNet.addPlace("p" + String.valueOf(0));
				initialMarking.add(currentPlace);
				for (int i = 0; i < trace.size(); i++) {
					Place nextPlace = traceNet.addPlace("p" + String.valueOf(i + 1));
					Transition t = traceNet.addTransition(String.valueOf(trace.get(i).getAttributes().get("concept:name")));
					t.setInvisible(false);
					traceNet.addArc(currentPlace, t);
					traceNet.addArc(t, nextPlace);
					currentPlace = nextPlace;
				}
			}

			// State space of the trace net, then its cross product with the
			// model's reachability graph.
			traceObj = calculateTSForTrace(context, traceNet, initialMarking, PetrinetSemanticsFactory.regularPetrinetSemantics(Petrinet.class));
			State initStateForPetri = new State(initial,ts);
			CrossProductImpl CP = new CrossProductImpl();


			Object[] obj = CP.getCrossProduct(
					ts,
					initStateForPetri,
					acceptingStates,
					(ReachabilityGraph) traceObj[0],
					(State) traceObj[2],
					(AcceptStateSet) traceObj[3]);

			if(obj!=null) {
				ReachabilityGraph another_ts = (ReachabilityGraph)obj[0];
				HashSet reachableState = another_ts.getReachableState();

				// Without any accepting state the model cannot replay the
				// trace: its probability expression is "0".
				Boolean noAcceptState = true;
				for(State s: another_ts.getNodes()) {
					if(s.isAccepting()) {
						noAcceptState = false;
					}
				}
				if(noAcceptState) {
//					System.out.println("no acc state in this cross product");
					isolatedVariantMap.put(traceStr,"0");
					j++;
					continue;
				}

				@SuppressWarnings("unchecked")
				HashMap combiToPetri = (HashMap)obj[1];

				//construct equations system
				EquationSystems eq = new EquationSystems();
				String assis1 = "Solve";

				Object[] resultObj = eq.getEqStr(another_ts, tm, tProbMap, combiToPetri, reachableState);
				String eqSys = (String) resultObj[0];
				@SuppressWarnings("unchecked")
				HashMap replTransProb = (HashMap)resultObj[1];

				// Solve the equation system in a worker thread with a 30s
				// timeout; on timeout or solver failure the variant gets
				// probability expression "0".
				try {
					ExecutorService executorService = Executors.newSingleThreadExecutor();
					String isolatedVar = null;
					Future future = executorService.submit(() -> IsolateVariable.getIsolatedVar(assis1+eqSys));

					try {
						isolatedVar = future.get(30, TimeUnit.SECONDS);
					} catch (TimeoutException e) {
						System.out.println("Timeout computing the probability");
						isolatedVariantMap.put(traceStr,"0");
					} catch(Exception e){
						// NOTE(review): swallowed; isolatedVar stays null and
						// is handled by the null check below.
					}finally {
						executorService.shutdown();
					}

					if (isolatedVar==null) {
						j++;
						isolatedVariantMap.put(traceStr,"0");
						continue;
					}
//					isolatedVar = isolatedVar.replaceAll("\\s+","");
					// Substitute solver variables xN by the symbolic
					// transition-probability expressions.
					String transformedIsolatedVar = replaceExpressions(isolatedVar, replTransProb);
//					System.out.println("the isolatated var before: "+isolatedVar);
//					System.out.println("the "+String.valueOf(j+1)+"-th trace: " + traceStr);
//					System.out.println("the isolatated var after: "+transformedIsolatedVar);
					isolatedVariantMap.put(traceStr,transformedIsolatedVar);
				}
				catch(Exception e){
					// NOTE(review): a failure here leaves traceStr without an
					// isolatedVariantMap entry, which getUEMSC later
					// dereferences (NPE risk) — consider putting "0" here too.
				}
			}
			else {
				isolatedVariantMap.put(traceStr,"0");
			}
			j++;
		}

		// Objective function: getUEMSC reads the candidate weights plus the
		// variant maps filled above; mapFlag triggers its one-off debug dump.
		mapFlag = true;
		MultivariateFunction fER = new MultivariateFunction() {
			public double value(double[] x) {
				return getUEMSC(x);
			}
		};

		// CMA-ES over one weight per model transition, box-constrained to
		// [0.0001, 1].
		double[] lowerBound = new double[modelTransitionNum];
		double[] upperBound = new double[modelTransitionNum];
		double[] initGuess = new double[modelTransitionNum];
		double[] sigma = new double[modelTransitionNum];
		Arrays.fill(lowerBound, 0.0001);
		Arrays.fill(upperBound, 1.0000);
		Arrays.fill(initGuess, 1);
		Arrays.fill(sigma, 0.1);
		UniformRandomProvider rngG = RandomSource.MT_64.create();
		CMAESOptimizer optimizer = new CMAESOptimizer(
				1000000,
				0,
				true,
modelTransitionNum, + 100, + rngG, + true, + null); + + double[] result1 = optimizer.optimize( + new MaxEval(10000), + new ObjectiveFunction(fER), + GoalType.MAXIMIZE, + new PopulationSize((int) (4+3*Math.log(modelTransitionNum))), + new Sigma(sigma), + new InitialGuess(initGuess), + new SimpleBounds(lowerBound, upperBound)).getPoint(); + + StochasticLabelledPetriNetSimpleWeightsEditable result = new StochasticLabelledPetriNetSimpleWeightsImpl(); + + TObjectIntMap oldPlace2place = new TObjectIntHashMap<>(); + for (Place oldPlace : net.getPlaces()) { + int place = result.addPlace(); + oldPlace2place.put(oldPlace, place); + } + + for (Place oldPlace : initial) { + result.addPlaceToInitialMarking(oldPlace2place.get(oldPlace), initial.occurrences(oldPlace)); + } + + TObjectIntMap oldTransition2newTransition = new TObjectIntHashMap<>(); + for (Transition oldTransition : net.getTransitions()) { + int newTransition; + for(String tId:tmap.keySet()) { + if(tId.equals(oldTransition.getLocalID().toString())) { + String tr = tmap.get(tId); + String tr2 = tr.substring(tr.length()-1); + Integer idx = Integer.valueOf(tr2); + double weight = result1[idx]; + + if (oldTransition.isInvisible()) { + newTransition = result.addTransition(weight); + } else { + newTransition = result.addTransition(oldTransition.getLabel(), weight); + } + oldTransition2newTransition.put(oldTransition, newTransition); + break; + } + } + } + for (Transition oldTransition : net.getTransitions()) { + int newTransition = oldTransition2newTransition.get(oldTransition); + for (PetrinetEdge edge : net.getInEdges(oldTransition)) { + Place oldSource = (Place) edge.getSource(); + result.addPlaceTransitionArc(oldPlace2place.get(oldSource), newTransition); + } + for (PetrinetEdge edge : net.getOutEdges(oldTransition)) { + Place oldTarget = (Place) edge.getTarget(); + result.addTransitionPlaceArc(newTransition, oldPlace2place.get(oldTarget)); + } + } + return result; + } + + + private static String 
replaceExpressions(String input,HashMap varReplace) { + +// String input_without_power = transformPowerExpression(input); +// System.out.println("the var without exp: "+input_without_power); + + + StringBuffer sb = new StringBuffer(); + Pattern pattern = Pattern.compile("x\\d+"); + Matcher matcher = pattern.matcher(input); + + while (matcher.find()) { + String match = matcher.group(); + String replacement = varReplace.getOrDefault(match, match); + matcher.appendReplacement(sb, replacement); + } + + matcher.appendTail(sb); + return sb.toString(); + } + + private static String transformPowerExpression(String powerExpression) { + // Split the expression into variable and exponent parts + String[] parts = powerExpression.split("\\^"); + + // Extract variable and exponent + String variable = parts[0]; + int exponent = Integer.parseInt(parts[1]); + + // Create the transformed expression + StringBuilder transformedExpression = new StringBuilder(); + for (int i = 0; i < exponent; i++) { + transformedExpression.append(variable); + if (i < exponent - 1) { + transformedExpression.append("*"); + } + } + + return transformedExpression.toString(); + } + + private Object[] getProbabilityFromPetriRG( + ReachabilityGraph ts, + HashMap tm) { + + HashMap> tProbMap = new HashMap>(); + Integer i = 0; + HashMap x2ProbMap = new HashMap(); + + + // TODO Auto-generated method stub + for(State state: ts.getNodes()) { + // for each outgoing edges, get the probability + String result = "("; + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + result = result.concat(tm.get(t.getIdentifier())+"+"); + } + + result = result.substring(0, result.length()-1); + result = result.concat(")"); + + for(org.processmining.slpnminer.models.reachabilitygraph.Transition t: ts.getOutEdges(state)) { + if(tProbMap.containsKey(tm.get(t.getIdentifier()))) { + + if(ts.getOutEdges(state).size()==1) {//if transition t is the only edge + 
					tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), "1");
					i++;
				}
				else {
					tProbMap.get(tm.get(t.getIdentifier())).put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result);
					i++;
				}
			}
			else {
				// First expression for this transition variable: start its
				// state -> expression map.
				HashMap tempMap = new HashMap();
				if(ts.getOutEdges(state).size()==1) {//if transition t is the only edge
					tempMap.put(state.getLabel(), "1");
				}
				else {
					tempMap.put(state.getLabel(), tm.get(t.getIdentifier())+"/"+result);
				}
				i++;
				tProbMap.put(tm.get(t.getIdentifier()), tempMap);
			}
		}
	}

		Object[] obj = new Object[2];
		obj[0] = tProbMap;
		obj[1] = x2ProbMap;
		return obj;
	}

	/**
	 * Builds the state space of a (sequential) trace net and returns the
	 * pieces needed for the cross product with the model's state space.
	 *
	 * @return array of {reachability graph, reachability set, initial state,
	 *         accepting states}
	 */
	public Object[] calculateTSForTrace(PluginContext context, Petrinet net, Marking state, PetrinetSemantics semantics)
			throws ConnectionCannotBeObtained {
		semantics.initialize(net.getTransitions(), new Marking(state));
		return buildAndConnectForTrace(context, net, state, semantics, null);
	}


	/**
	 * Trace-net variant of the state-space construction: same boundedness
	 * checks and breadth-first exploration as {@code buildAndConnect}, but it
	 * additionally marks deadlock states as accepting and returns the raw
	 * artefacts instead of running the estimation.
	 *
	 * @return {@code {ts, rs, initStateForTrace, acceptingStates}}, or null
	 *         when the net is unbounded or the state space could not be built
	 */
	private Object[] buildAndConnectForTrace(PluginContext context, PetrinetGraph net, Marking initial,
			Semantics semantics, CoverabilityGraph coverabilityGraph)
			throws ConnectionCannotBeObtained {
		context.getConnectionManager().getFirstConnection(InitialMarkingConnection.class, context, net, initial);
		ReachabilityGraph ts = null;
		NetAnalysisInformation.BOUNDEDNESS info = null;

		// Reuse a cached boundedness analysis if one is connected.
		try {
			BoundednessInfoConnection analysis = context.getConnectionManager().getFirstConnection(
					BoundednessInfoConnection.class, context, net, initial, semantics);
			info = analysis.getObjectWithRole(BehavioralAnalysisInformationConnection.NETANALYSISINFORMATION);
		} catch (Exception e) {
			// No connections available
		}

		if ((info != null) && info.getValue().equals(NetAnalysisInformation.UnDetBool.FALSE)) {
			// This net has been shown to be unbounded on this marking
			context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
					Logger.MessageLevel.ERROR);
			context.getFutureResult(0).cancel(true);
			// unreachable statement, but safe.
			return null;
		}

		if (coverabilityGraph != null) {// && !bounded) {
			if (!BoundednessAnalyzer.isBounded(coverabilityGraph)) {
				// This net has been shown to be unbounded on this marking
				context.log("The given net is unbounded on the given initial marking, no Statespace is constructed.",
						Logger.MessageLevel.ERROR);
				context.getFutureResult(0).cancel(true);
				// unreachable statement, but safe.
				return null;
			}
			// clone the graph and return
			// NOTE(review): unlike buildAndConnect, only the states are cloned
			// here — the coverability-graph edges are not copied. Verify this
			// is intentional (the branch is unused: callers pass null).
			Map mapping = new HashMap();

			ts = new ReachabilityGraph("StateSpace of " + net.getLabel());
			for (Object o : coverabilityGraph.getStates()) {
				CTMarking m = (CTMarking) o;
				Marking tsm = new Marking(m);
				ts.addState(tsm);
				mapping.put(m, tsm);
			}
		}

		StartStateSet startStates = new StartStateSet();
		startStates.add(initial);

		if (ts == null) {
			ts = doBreadthFirst(context, net.getLabel(), initial, semantics, MAXSTATES);
		}
		if (ts == null) {
			// Problem with the reachability graph.
			context.getFutureResult(0).cancel(true);
			return null;
		}

		// Deadlock states of the trace net are its accepting states; they are
		// also flagged on the State objects for the cross product.
		AcceptStateSet acceptingStates = new AcceptStateSet();
		for (State state : ts.getNodes()) {
			if (ts.getOutEdges(state).isEmpty()) {
				state.setAccepting(true);
				acceptingStates.add(state.getIdentifier());
			}
		}

		Marking[] markings = ts.getStates().toArray(new Marking[0]);
		ReachabilitySet rs = new ReachabilitySet(markings);

		// Publish the usual ProM connections for the computed state space.
		context.addConnection(new ReachabilitySetConnection(net, initial, rs, semantics, "Reachability Set"));
		context.addConnection(new StateSpaceConnection(net, initial, ts, semantics));
		context.addConnection(new ReachabilityConnection(ts, startStates, acceptingStates));
		context.addConnection(new DeadMarkingConnection(net, initial, acceptingStates, semantics));

		context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size()
				+ " transitions.", Logger.MessageLevel.DEBUG);
		State initStateForTrace = new State(initial,ts);

		return new Object[]{ts, rs, initStateForTrace, acceptingStates};
	}

	/**
	 * Breadth-first construction of the reachability graph starting from
	 * {@code state}, aborting (and returning null) once more than {@code max}
	 * states are produced.
	 */
	private ReachabilityGraph doBreadthFirst(PluginContext context, String label, Marking state,
			Semantics semantics, int max) {
		ReachabilityGraph ts = new ReachabilityGraph("StateSpace of " + label);
		ts.addState(state);
		Queue newStates = new LinkedList();
		newStates.add(state);
		do {
			newStates.addAll(extend(ts, newStates.poll(), semantics, context));
		} while (!newStates.isEmpty() && (ts.getStates().size() < max));
		if (!newStates.isEmpty()) {
			// State-space bound hit: treat as (potentially) unbounded, abort.
			context.log("The behaviour of the given net is has over " + max + " states. Aborting...",
					Logger.MessageLevel.ERROR);
			context.getFutureResult(0).cancel(true);
			return null;
		}
		return ts;

	}

	/**
	 * Fires every transition enabled in {@code state}, adds the successor
	 * markings and the corresponding edges to {@code ts}, and returns the
	 * markings that were not in the graph before (the new BFS frontier).
	 */
	private Set extend(ReachabilityGraph ts,
			Marking state,
			Semantics semantics,
			PluginContext context) {
		Set newStates = new HashSet();
		semantics.setCurrentState(state);
		for (Transition t : semantics.getExecutableTransitions()) {
			// Reset to the source marking before firing each transition.
			semantics.setCurrentState(state);
			try {
				/*
				 * [HV] The local variable info is never read
				 * ExecutionInformation info =
				 */
				semantics.executeExecutableTransition(t);
				// context.log(info.toString(), MessageLevel.DEBUG);
			} catch (IllegalTransitionException e) {
				context.log(e);
				assert (false);
			}
			Marking newState = semantics.getCurrentState();

			if (ts.addState(newState)) {
				newStates.add(newState);
				int size = ts.getEdges().size();
				// Periodic progress report every 1000 edges.
				if (size % 1000 == 0) {
					context.log("Statespace size: " + ts.getStates().size() + " states and " + ts.getEdges().size()
							+ " transitions.", Logger.MessageLevel.DEBUG);
				}
			}
//			System.out.println("get id of trans: "+t.getLocalID()+" "+t.getLabel());

			ts.addTransition(state, newState, t.getLocalID(), t.getLabel(), t.isInvisible());

			semantics.setCurrentState(state);
		}
		return newStates;
	}

	/**
	 * CMA-ES objective: the negated uEMSC loss for the candidate weight
	 * vector {@code t}. Binds t0..t(n-1) to the candidate values, evaluates
	 * each variant's probability expression, and subtracts
	 * max(relativeFrequency - modelProbability, 0) per variant; maximising
	 * this value therefore minimises the total underestimation.
	 *
	 * <p>Relies on the static state filled by {@code buildAndConnect}
	 * (traceVariantMap, isolatedVariantMap, logSize, modelTransitionNum,
	 * strToDouble).
	 */
	public static double getUEMSC(double[] t){
		for(int i=0;i < modelTransitionNum; i++) {
			strToDouble.put("t"+String.valueOf(i), t[i]);
		}

		// One-off dump of variant frequencies on the first evaluation only.
		if (mapFlag) {
			for(String s:traceVariantMap.keySet()) {
				if (!isolatedVariantMap.get(s).equals("0")) {
					System.out.println("Trace: "+s+", with frequency: "+(double)traceVariantMap.get(s)/logSize);
				}
			}
			mapFlag = false;
		}
		double val = 0;
		for(String s:traceVariantMap.keySet()) {
			// Variants with expression "0" contribute nothing and are skipped.
			if (!isolatedVariantMap.get(s).equals("0")) {
				val -= Math.max((double)traceVariantMap.get(s)/logSize - StrToExp.converStringToMathExp(isolatedVariantMap.get(s),strToDouble), 0);
			}
		}
		return val;
	}

}