repo | file | code | file_length | avg_line_length | max_line_length | extension_type |
---|---|---|---|---|---|---|
Janus | Janus-master/src/minerful/concept/constraint/xmlenc/ConstraintsSetAdapter.java | package minerful.concept.constraint.xmlenc;
import java.util.ArrayList;
import java.util.Set;
import java.util.TreeSet;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import minerful.concept.constraint.Constraint;
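/**
* JAXB adapter converting between a Set of Constraint objects and a list-based wrapper (SetList),
* whose entries are serialised as "constraint" elements; unmarshalling returns the entries as a sorted set.
*/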
@XmlRootElement
public class ConstraintsSetAdapter extends XmlAdapter<ConstraintsSetAdapter.SetList, Set<Constraint>>{
public static class SetList {
@XmlElements({
@XmlElement(type=Constraint.class, name="constraint"),
})
public ArrayList<Constraint> list = null;
private SetList() {}
public SetList(Set<Constraint> list) {
this();
this.list = new ArrayList<Constraint>(list);
}
public Set<Constraint> getSetList() {
return new TreeSet<Constraint>(this.list);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("SetList [list=");
builder.append(list);
builder.append("]");
return builder.toString();
}
}
@XmlElement(name="constraints")
public ConstraintsSetAdapter.SetList list;
private ConstraintsSetAdapter() {}
public ConstraintsSetAdapter(Set<Constraint> value) {
this();
this.list = new ConstraintsSetAdapter.SetList(value);
}
@Override
public ConstraintsSetAdapter.SetList marshal(
Set<Constraint> v) throws Exception {
return new ConstraintsSetAdapter.SetList(v);
}
@Override
public Set<Constraint> unmarshal(
ConstraintsSetAdapter.SetList v)
throws Exception {
return v.getSetList();
}
}
| 1,599 | 24 | 102 | java |
Janus | Janus-master/src/minerful/concept/xmlenc/CharAdapter.java | package minerful.concept.xmlenc;
import javax.xml.bind.annotation.adapters.XmlAdapter;
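/**
* JAXB adapter converting a single Character to its String representation and back
* (unmarshalling takes the first character of the given string).
*/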
public class CharAdapter extends XmlAdapter<String, Character> {
@Override
public String marshal(Character c) throws Exception {
return String.valueOf(c);
}
@Override
public Character unmarshal(String s) throws Exception {
return s.charAt(0);
}
} | 346 | 22.133333 | 64 | java |
Janus | Janus-master/src/minerful/concept/xmlenc/TaskClassAdapter.java | package minerful.concept.xmlenc;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import minerful.concept.AbstractTaskClass;
import minerful.logparser.StringTaskClass;
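/**
* JAXB adapter serialising an AbstractTaskClass as its name and deserialising it as a StringTaskClass.
*/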
public class TaskClassAdapter extends XmlAdapter<String, AbstractTaskClass> {
@Override
public String marshal(AbstractTaskClass c) throws Exception {
return c.getName();
}
@Override
public AbstractTaskClass unmarshal(String s) throws Exception {
return new StringTaskClass(s);
}
} | 467 | 25 | 77 | java |
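The three adapter classes above are plain JAXB XmlAdapters. The following is a minimal, hypothetical usage sketch, not taken from the MINERful codebase: the AdapterUsageSketch class and its identifier field are invented for illustration, and the standard javax.xml.bind (JAXB) API is assumed to be available on the classpath. It shows how such an adapter is typically attached to a field via @XmlJavaTypeAdapter.
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import minerful.concept.xmlenc.CharAdapter;
@XmlRootElement
public class AdapterUsageSketch {
    // CharAdapter serialises this Character as a one-character string element.
    @XmlJavaTypeAdapter(CharAdapter.class)
    public Character identifier = 'a';
    public static void main(String[] args) throws Exception {
        Marshaller marshaller = JAXBContext.newInstance(AdapterUsageSketch.class).createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        // Prints something like: <adapterUsageSketch><identifier>a</identifier></adapterUsageSketch>
        marshaller.marshal(new AdapterUsageSketch(), System.out);
    }
}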
Janus | Janus-master/src/minerful/examples/api/discovery/MinerFulCallerOnStringFile.java | package minerful.examples.api.discovery;
import java.io.File;
import minerful.MinerFulMinerLauncher;
import minerful.MinerFulOutputManagementLauncher;
import minerful.concept.ProcessModel;
import minerful.io.params.OutputModelParameters;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.InputLogCmdParameters;
import minerful.params.InputLogCmdParameters.InputEncoding;
import minerful.params.SystemCmdParameters;
import minerful.params.ViewCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters;
/**
* This example class demonstrates how to call MINERful to discover a process model out of strings saved on a file.
* @author Claudio Di Ciccio ([email protected])
*/
public class MinerFulCallerOnStringFile {
public static void main(String[] args) {
InputLogCmdParameters inputParams =
new InputLogCmdParameters();
MinerFulCmdParameters minerFulParams =
new MinerFulCmdParameters();
ViewCmdParameters viewParams =
new ViewCmdParameters();
OutputModelParameters outParams =
new OutputModelParameters();
SystemCmdParameters systemParams =
new SystemCmdParameters();
PostProcessingCmdParameters postParams =
new PostProcessingCmdParameters();
inputParams.inputLogFile = new File("/home/claudio/Desktop/Temp-MINERful/testlog.txt");
inputParams.inputLanguage = InputEncoding.strings;
MinerFulMinerLauncher miFuMiLa = new MinerFulMinerLauncher(inputParams, minerFulParams, postParams, systemParams);
ProcessModel processModel = miFuMiLa.mine();
outParams.fileToSaveAsXML = new File("/home/claudio/Desktop/Temp-MINERful/model.xml");
MinerFulOutputManagementLauncher outputMgt = new MinerFulOutputManagementLauncher();
outputMgt.manageOutput(processModel, viewParams, outParams, systemParams);
System.out.println(processModel);
System.exit(0);
}
} | 1,877 | 34.433962 | 116 | java |
Janus | Janus-master/src/minerful/examples/api/discovery/MinerFulObserverInvokerOnXesFile.java | package minerful.examples.api.discovery;
import java.io.File;
import java.util.ArrayList;
import java.util.Observable;
import java.util.Observer;
import minerful.MinerFulMinerLauncher;
import minerful.MinerFulOutputManagementLauncher;
import minerful.MinerFulSimplificationLauncher;
import minerful.concept.ProcessModel;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintChange;
import minerful.io.params.OutputModelParameters;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.InputLogCmdParameters;
import minerful.params.SystemCmdParameters;
import minerful.params.ViewCmdParameters;
import minerful.params.SystemCmdParameters.DebugLevel;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters.PostProcessingAnalysisType;
/**
* This example class demonstrates how to invoke the MINERful miner as an API, and subsequently observe the
* changes that are applied to the process model by the MinerFulSimplificationLauncher.
* Lastly, we save the model as a Declare Map file.
*
* @author Claudio Di Ciccio ([email protected])
*
*/
public class MinerFulObserverInvokerOnXesFile implements Observer {
public static void main(String[] args) {
//////////////////////////////////////////////////////////////////
// Discovery phase
//////////////////////////////////////////////////////////////////
InputLogCmdParameters inputParams =
new InputLogCmdParameters();
MinerFulCmdParameters minerFulParams =
new MinerFulCmdParameters();
ViewCmdParameters viewParams =
new ViewCmdParameters();
OutputModelParameters outParams =
new OutputModelParameters();
SystemCmdParameters systemParams =
new SystemCmdParameters();
PostProcessingCmdParameters postParams =
new PostProcessingCmdParameters();
inputParams.inputLogFile = new File("/home/claudio/Code/MINERful/logs/BPIC2012/financial_log.xes.gz");
postParams.supportThreshold = 0.9;
postParams.confidenceThreshold = 0.25;
postParams.interestFactorThreshold = 0.125;
// Optionally, exclude some tasks from the analysis.
minerFulParams.activitiesToExcludeFromResult = new ArrayList<String>();
minerFulParams.activitiesToExcludeFromResult.add("W_Wijzigen contractgegevens");
minerFulParams.activitiesToExcludeFromResult.add("W_Valideren aanvraag");
minerFulParams.activitiesToExcludeFromResult.add("W_Completeren aanvraag");
// With the following option set to "false", redundant/inconsistent/below-thresholds constraints are retained in the model, although marked as such
postParams.cropRedundantAndInconsistentConstraints = false;
// To completely remove any form of post-processing, uncomment the following line:
// postParams.analysisType = PostProcessingAnalysisType.NONE;
// Run the discovery algorithm
System.out.println("Running the discovery algorithm...");
MinerFulMinerLauncher miFuMiLa = new MinerFulMinerLauncher(inputParams, minerFulParams, postParams, systemParams);
ProcessModel processModel = miFuMiLa.mine();
System.out.println("...Done");
//////////////////////////////////////////////////////////////////
// Observing the changes in the model with the "observer" pattern implementation
//////////////////////////////////////////////////////////////////
// Start observing changes in the model
System.out.println("Starting to observe the changes in the process model...");
processModel.addObserver(new MinerFulObserverInvokerOnXesFile());
//////////////////////////////////////////////////////////////////
// Simplification phase
//////////////////////////////////////////////////////////////////
// Set up the new options for the simplification tool. Beware that untouched options stay the same, of course.
postParams.supportThreshold = 0.9;
postParams.confidenceThreshold = 0.5;
postParams.interestFactorThreshold = 0.25;
postParams.postProcessingAnalysisType = PostProcessingAnalysisType.HIERARCHYCONFLICTREDUNDANCYDOUBLE;
// Run the simplification algorithm
System.out.println("Running the simplification algorithm...");
MinerFulSimplificationLauncher miFuSiLa = new MinerFulSimplificationLauncher(processModel, postParams);
miFuSiLa.simplify();
System.out.println("...Done");
//////////////////////////////////////////////////////////////////
// Saving
//////////////////////////////////////////////////////////////////
// Specify the output files locations
// Please notice that only the XML-saved model may also contain the redundant/conflicting/below-the-thresholds constraints.
// To do so, the
// postParams.cropRedundantAndInconsistentConstraints = false;
// directive was given. By leaving the default value (true), the model does NOT contain the redundant/conflicting/below-the-thresholds constraints.
outParams.fileToSaveAsXML = new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco-minerful.xml");
// Please notice that NONE of the Declare-map XML-, JSON-, or CSV-formatted copies contain the redundant/conflicting/below-the-thresholds constraints.
outParams.fileToSaveAsConDec = new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco-declaremap.xml");
outParams.fileToSaveAsJSON = new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco.json");
outParams.fileToSaveConstraintsAsCSV = new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco.csv");
System.out.println("Saving...");
MinerFulOutputManagementLauncher outputMgt = new MinerFulOutputManagementLauncher();
outputMgt.manageOutput(processModel, viewParams, outParams, systemParams);
System.out.println("...Done");
//////////////////////////////////////////////////////////////////
// Cropping the identified redundant/inconsistent constraints
//////////////////////////////////////////////////////////////////
// Let us minimise the model now, by removing the redundant/conflicting/below-the-thresholds constraints.
postParams.cropRedundantAndInconsistentConstraints = true;
postParams.postProcessingAnalysisType = PostProcessingAnalysisType.NONE;
// It is not necessary to go through all checks again, if we do not want to change the thresholds or the conflict/redundancy-check policies: it is enough to set the previous option to "true"
System.out.println("Removing the already detected inconsistencies/redundancies...");
miFuSiLa.simplify();
System.out.println("...Done");
//////////////////////////////////////////////////////////////////
// Saving again...
//////////////////////////////////////////////////////////////////
outParams.fileToSaveAsXML = new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco-minerful-min.xml");
System.out.println("Saving...");
outputMgt.manageOutput(processModel, viewParams, outParams, systemParams);
System.out.println("...Done");
// That's all for now
System.exit(0);
}
/**
* Just a simple implementation of the method to implement for observers on the process model.
* It prints what happened.
*/
@Override
public void update(Observable o, Object arg) {
// Just to check whether "o", namely the notifier, is a process model.
// Until a new Observer-Observable framework is provided for other objects of MINERful, this check is basically useless.
if (ProcessModel.class.isAssignableFrom(o.getClass())) {
ConstraintChange change = (ConstraintChange) arg;
System.out.println("Change detected! "
+ "Constraint "
+ change.constraint
+ " has updated its "
+ change.property
+ " to the new value of "
+ change.value);
// The following line is used to show that one can access all properties of the modified constraint.
System.out.println("\tIs it suitable for elimination? "
+ change.constraint.isMarkedForExclusion());
}
}
} | 7,922 | 43.511236 | 203 | java |
Janus | Janus-master/src/minerful/examples/api/fitness/FitnessCheckOfDiscoveredProcessModel.java | package minerful.examples.api.fitness;
import java.io.File;
import minerful.MinerFulFitnessCheckLauncher;
import minerful.MinerFulMinerLauncher;
import minerful.checking.params.CheckingCmdParameters;
import minerful.concept.ProcessModel;
import minerful.logparser.LogParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.InputLogCmdParameters;
import minerful.params.InputLogCmdParameters.EventClassification;
import minerful.params.SystemCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters.PostProcessingAnalysisType;
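/**
* This example class demonstrates how to discover a process model from an event log and then check its fitness
* against a second event log, both as a whole and on a single trace, saving the results as CSV files.
*/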
public class FitnessCheckOfDiscoveredProcessModel {
private static final String EXAMPLE_LOG_FILE_1 = "/home/claudio/Code/MINERful/logs/BPIC2017/BPI-Challenge-2017-sample1.xes";
private static final String EXAMPLE_LOG_FILE_2 = "/home/claudio/Code/MINERful/logs/BPIC2017/BPI-Challenge-2017-sample2.xes";
private static final String EXAMPLE_LOG_TEST_OUT_CSV_FILE = "/home/claudio/Temp/fitness-log-test-example.csv";
private static final String EXAMPLE_TRACE_TEST_OUT_CSV_FILE = "/home/claudio/Temp/fitness-trace-test-example.csv";
public static void main(String[] args) {
//////////////////////////////////////////////////////////////////
//Discovery phase
//////////////////////////////////////////////////////////////////
InputLogCmdParameters inputLogParams = new InputLogCmdParameters();
MinerFulCmdParameters minerFulParams = new MinerFulCmdParameters();
SystemCmdParameters systemParams = new SystemCmdParameters();
PostProcessingCmdParameters postParams = new PostProcessingCmdParameters();
inputLogParams.inputLogFile = new File(EXAMPLE_LOG_FILE_1);
inputLogParams.eventClassification = EventClassification.name;
// Use the one below if you want to classify events not just by their task name:
// inputLogParams.eventClassification = EventClassification.logspec;
postParams.supportThreshold = 0.95; // For a sure total fit with the event log, this parameter should be set to 1.0
postParams.confidenceThreshold = 0.66; // The higher this is, the higher the frequency of occurrence of tasks triggering the returned constraints
postParams.interestFactorThreshold = 0.5; // The higher this is, the higher the frequency of occurrence of tasks involved in the returned constraints
// Remove redundant constraints. WARNING: this may take some time.
// The language of the model remains completely unchanged. What changes is the number of constraints in it.
postParams.postProcessingAnalysisType = PostProcessingAnalysisType.HIERARCHYCONFLICTREDUNDANCYDOUBLE;
// To leave the default post-processing, comment the line above. To completely remove any form of post-processing, comment the line above and uncomment the following one
// postParams.postProcessingAnalysisType = PostProcessingAnalysisType.NONE;
// Run the discovery algorithm
System.out.println("Running the discovery algorithm...");
MinerFulMinerLauncher miFuMiLa = new MinerFulMinerLauncher(inputLogParams, minerFulParams, postParams, systemParams);
ProcessModel processModel = miFuMiLa.mine();
System.out.println("...Done");
//////////////////////////////////////////////////////////////////
//Evaluation phase on an entire log
//////////////////////////////////////////////////////////////////
CheckingCmdParameters chkParams = new CheckingCmdParameters();
inputLogParams.inputLogFile = new File(EXAMPLE_LOG_FILE_2);
chkParams.fileToSaveResultsAsCSV = new File(EXAMPLE_LOG_TEST_OUT_CSV_FILE);
LogParser loPar = MinerFulMinerLauncher.deriveLogParserFromLogFile(inputLogParams);
MinerFulFitnessCheckLauncher miFuCheLa = new MinerFulFitnessCheckLauncher(processModel, loPar, chkParams);
// Check the process model extracted from EXAMPLE_LOG_FILE_1 against EXAMPLE_LOG_FILE_2
miFuCheLa.check();
//////////////////////////////////////////////////////////////////
//Evaluation phase specifically on a single trace of a log
//////////////////////////////////////////////////////////////////
chkParams.fileToSaveResultsAsCSV = new File(EXAMPLE_TRACE_TEST_OUT_CSV_FILE);
// Check the process model extracted from EXAMPLE_LOG_FILE_1 against the first trace of EXAMPLE_LOG_FILE_2
miFuCheLa.check(loPar.traceIterator().next());
System.exit(0);
}
} | 4,288 | 53.291139 | 171 | java |
Janus | Janus-master/src/minerful/examples/api/fitness/FitnessCheckOfDiscoveredProcessModelWithOpenXes.java | package minerful.examples.api.fitness;
import java.io.File;
import org.deckfour.xes.in.XesXmlParser;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
import org.processmining.plugins.declareminer.visualizing.DeclareMap;
import minerful.MinerFulMinerLauncher;
import minerful.checking.integration.prom.ModelFitnessEvaluatorOpenXesInterface;
import minerful.checking.relevance.dao.ModelFitnessEvaluation;
import minerful.concept.ProcessModel;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.logparser.LogEventClassifier.ClassificationType;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.InputLogCmdParameters;
import minerful.params.InputLogCmdParameters.EventClassification;
import minerful.params.SystemCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters.PostProcessingAnalysisType;
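/**
* This example class demonstrates how to discover a process model and evaluate its fitness on OpenXES
* XLog logs and single XTrace traces through the ModelFitnessEvaluatorOpenXesInterface.
*/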
public class FitnessCheckOfDiscoveredProcessModelWithOpenXes {
private static final String EXAMPLE_LOG_FILE_1 = "/home/claudio/Code/MINERful/logs/BPIC2017/BPI-Challenge-2017-sample1.xes";
private static final String EXAMPLE_LOG_FILE_2 = "/home/claudio/Code/MINERful/logs/BPIC2017/BPI-Challenge-2017-sample2.xes";
private static final String EXAMPLE_LOG_FILE_3 = "/home/claudio/Code/MINERful/logs/BPIC2017/BPI-Challenge-2017-sample3.xes";
private static final int EXAMPLE_XTRACE_PICK_2 = 64;
private static final int EXAMPLE_XTRACE_PICK_3 = 128;
public static void main(String[] args) {
//////////////////////////////////////////////////////////////////
//Discovery phase
//////////////////////////////////////////////////////////////////
// Initialising parameters. Read their documentation to know more about their customisations
InputLogCmdParameters inputLogParams = new InputLogCmdParameters();
MinerFulCmdParameters minerFulParams = new MinerFulCmdParameters();
SystemCmdParameters systemParams = new SystemCmdParameters();
PostProcessingCmdParameters postParams = new PostProcessingCmdParameters();
// Loading log
XLog myXLog = null;
try {
myXLog = new XesXmlParser().parse(new File(EXAMPLE_LOG_FILE_1)).get(0);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
inputLogParams.eventClassification = EventClassification.name;
// // Use the one below if you want to classify events not just by their task name!
// inputLogParams.eventClassification = EventClassification.logspec;
postParams.supportThreshold = 0.95; // For a sure total fit with the event log, this parameter should be set to 1.0
postParams.confidenceThreshold = 0.66; // The higher this is, the higher the frequency of occurrence of tasks triggering the returned constraints
postParams.interestFactorThreshold = 0.5; // The higher this is, the higher the frequency of occurrence of tasks involved in the returned constraints
// Remove redundant constraints. WARNING: this may take some time.
// The language of the model remains completely unchanged. What changes is the number of constraints in it.
postParams.postProcessingAnalysisType = PostProcessingAnalysisType.HIERARCHYCONFLICTREDUNDANCYDOUBLE;
// To leave the default post-processing, comment the line above. To completely remove any form of post-processing, comment the line above and uncomment the following one
// postParams.postProcessingAnalysisType = PostProcessingAnalysisType.NONE;
// Run the discovery algorithm
System.out.println("Running the discovery algorithm...");
MinerFulMinerLauncher miFuMiLa = new MinerFulMinerLauncher(inputLogParams, minerFulParams, postParams, systemParams);
ProcessModel processModel = miFuMiLa.mine(myXLog);
System.out.println("...Done");
// In case we want a reference to an equivalent DeclareMap, use the converter as in the line below.
// Notice that the graphical position of elements in the Declare Map is not rendered here.
// Watch out though: it might take time! In case you do not need this instruction, just comment the following two lines.
DeclareMap declareMap = new DeclareMapEncoderDecoder(processModel).createDeclareMap();
System.out.println("The constraint definitions in the Declare Map amount to: " + declareMap.getModel().constraintDefinitionsCount());
// Notice that there is also a method that returns directly a DeclareMap upon mining the event log:
// DeclareMap declareMap = miFuMiLa.mineDeclareMap(myXLog);
//////////////////////////////////////////////////////////////////
//Evaluation phase on an entire log (an instance of OpenXES XLog)
//////////////////////////////////////////////////////////////////
try {
myXLog = new XesXmlParser().parse(new File(EXAMPLE_LOG_FILE_2)).get(0);
// // If you want a perfect fit, comment the line above and uncomment the one below
// myXLog = new XesXmlParser().parse(new File(EXAMPLE_LOG_FILE_1)).get(0);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
ModelFitnessEvaluatorOpenXesInterface xEvalor = new ModelFitnessEvaluatorOpenXesInterface(
myXLog,
ClassificationType.NAME,
// // In case you opted for EventClassification.logspec in the discovery phase above, you might want to replace the previous line with the following
// ClassificationType.LOG_SPECIFIED,
processModel);
ModelFitnessEvaluation xEvalon = xEvalor.evaluateOnLog();
if (xEvalon.isFullyFitting()) {
System.out.println("\nWhooppee! The event log in " + EXAMPLE_LOG_FILE_2 + " is perfectly fitting!\n"); // unlikely, if it is not the same log!
} else {
System.out.println(
"\nThe event log in " + EXAMPLE_LOG_FILE_2 + " did not comply with all constraints."
+ " The average fitness is " + xEvalon.avgFitness()
+ ". Details follow.\n\n"
+ xEvalon.printCSV());
}
//////////////////////////////////////////////////////////////////
//Evaluation phase specifically on a single trace of that XLog
//////////////////////////////////////////////////////////////////
XTrace myXTrace = myXLog.get(EXAMPLE_XTRACE_PICK_2);
xEvalon = xEvalor.evaluateOnTrace(myXTrace);
// Same code as above
if (xEvalon.isFullyFitting()) {
System.out.println("\nWhooppee! Trace " + EXAMPLE_XTRACE_PICK_2 + " of " + EXAMPLE_LOG_FILE_2 + " is perfectly fitting!\n");
} else {
System.out.println(
"\nTrace " + EXAMPLE_XTRACE_PICK_2 + " of " + EXAMPLE_LOG_FILE_2 + " did not comply with all constraints."
+ " The average fitness is " + xEvalon.avgFitness()
+ ". Details follow.\n\n"
+ xEvalon.printCSV());
}
//////////////////////////////////////////////////////////////////
//Evaluation phase specifically on a trace picked out of another XLog.
//////////////////////////////////////////////////////////////////
// Loading log
XLog myXLog3 = null;
try {
myXLog3 = new XesXmlParser().parse(new File(EXAMPLE_LOG_FILE_3)).get(0);
// // If you want a perfect fit, comment the line above and uncomment the one below
// myXLog3 = new XesXmlParser().parse(new File(EXAMPLE_LOG_FILE_1)).get(0);
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
}
XTrace myXTrace3 = myXLog3.get(EXAMPLE_XTRACE_PICK_3);
// This time the event log has changed, so we have to instantiate a new ModelFitnessEvaluatorOpenXesInterface
ModelFitnessEvaluatorOpenXesInterface xEvalor3 = new ModelFitnessEvaluatorOpenXesInterface(
myXLog3,
ClassificationType.NAME,
// // In case you opted for EventClassification.logspec in the discovery phase above, you might want to replace the previous line with the following
// ClassificationType.LOG_SPECIFIED,
processModel);
// Check the process model extracted from EXAMPLE_LOG_FILE_1 against trace EXAMPLE_XTRACE_PICK_3 of EXAMPLE_LOG_FILE_3
ModelFitnessEvaluation xEvalon3 = xEvalor3.evaluateOnTrace(myXTrace3);
// Same code as above
if (xEvalon3.isFullyFitting()) {
System.out.println("\nWhooppee! Trace " + EXAMPLE_XTRACE_PICK_3 + " of " + EXAMPLE_LOG_FILE_3 + " is perfectly fitting!\n");
} else {
System.out.println(
"\nTrace " + EXAMPLE_XTRACE_PICK_3 + " of " + EXAMPLE_LOG_FILE_3 + " did not comply with all constraints."
+ " The average fitness is " + xEvalon.avgFitness()
+ ". Details follow.\n\n"
+ xEvalon.printCSV());
}
System.exit(0);
}
} | 8,373 | 48.845238 | 171 | java |
Janus | Janus-master/src/minerful/examples/api/imperative/FromJsonAndXEStoAutomatonXML.java | package minerful.examples.api.imperative;
import java.io.File;
import java.io.IOException;
import minerful.MinerFulOutputManagementLauncher;
import minerful.concept.ProcessModel;
import minerful.io.encdec.ProcessModelEncoderDecoder;
import minerful.io.params.OutputModelParameters;
import minerful.logparser.LogParser;
import minerful.logparser.XesLogParser;
import minerful.logparser.LogEventClassifier.ClassificationType;
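/**
* This example class demonstrates how to load a process model from a JSON definition, read an XES log,
* and save the automaton derived from the model as an XML file.
*/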
public class FromJsonAndXEStoAutomatonXML {
public static final File OUTPUT_XML_FILE = new File("/home/claudio/example-process-automaton.xml");
public static final File INPUT_XES_FILE = new File("/home/claudio/example-log.xes");
public static void main(String[] args) throws Exception {
// This is a JSON string with the definition of a process. It is not case sensitive, and allows for some extra spaces, dashes, etc. in the template names.
String processJsonMin =
"{constraints: ["
+ "{template: respondedexistence, parameters: [['Submit abstract'],['Write new paper']]},"
+ "{template: response, parameters: [['Submit paper'],['Send confirmation email']]},"
+ "{template: succession, parameters: [['Submit paper'],['Review paper']]},"
+ "{template: precedence, parameters: [['Review paper'],['Accept paper']]},"
+ "{template: notsuccession, parameters: [['Reject paper'],['Submit paper']]},"
+ "{template: notcoexistence, parameters: [['Accept paper'],['Reject paper']]}"
+ "] }";
ProcessModel proMod =
new ProcessModelEncoderDecoder()
// /* Alternative 1: load from file. Uncomment the following line to use this method. */
// .readFromJsonFile(new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco.json"));
// /* Alternative 2: load from a (minimal) string version of the JSON model. Uncomment the following line to use this method. */
.readFromJsonString(processJsonMin);
/*
* Read the log
*/
LogParser logParser = new XesLogParser(INPUT_XES_FILE, ClassificationType.LOG_SPECIFIED);
/*
* Specifies the parameters used to create the automaton
*/
OutputModelParameters outParams = new OutputModelParameters();
outParams.fileToSaveXmlFileForAutomaton = OUTPUT_XML_FILE;
new MinerFulOutputManagementLauncher().manageOutput(proMod, outParams, logParser);
System.exit(0);
}
} | 2,311 | 42.622642 | 159 | java |
Janus | Janus-master/src/minerful/examples/api/imperative/FromJsonProcessModelToAutomaton.java | package minerful.examples.api.imperative;
import java.io.File;
import java.io.IOException;
import minerful.MinerFulOutputManagementLauncher;
import minerful.concept.ProcessModel;
import minerful.io.encdec.ProcessModelEncoderDecoder;
import minerful.io.params.OutputModelParameters;
import minerful.logparser.LogParser;
import minerful.logparser.XesLogParser;
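/**
* This example class demonstrates how to load a process model from a JSON definition and save the
* automaton derived from it as a DOT file.
*/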
public class FromJsonProcessModelToAutomaton {
public static final File OUTPUT_DOT_FILE = new File("/home/claudio/example-process-automaton.dot");
public static void main(String[] args) throws IOException {
// This is a JSON string with the definition of a process. It is not case sensitive, and allows for some extra spaces, dashes, etc. in the template names.
String processJsonMin =
"{constraints: ["
+ "{template: respondedexistence, parameters: [['Submit abstract'],['Write new paper']]},"
+ "{template: response, parameters: [['Submit paper'],['Send confirmation email']]},"
+ "{template: succession, parameters: [['Submit paper'],['Review paper']]},"
+ "{template: precedence, parameters: [['Review paper'],['Accept paper']]},"
+ "{template: notsuccession, parameters: [['Reject paper'],['Submit paper']]},"
+ "{template: notcoexistence, parameters: [['Accept paper'],['Reject paper']]}"
+ "] }";
ProcessModel proMod =
new ProcessModelEncoderDecoder()
// /* Alternative 1: load from file. Uncomment the following line to use this method. */
// .readFromJsonFile(new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco.json"));
// /* Alternative 2: load from a (minimal) string version of the JSON model. Uncomment the following line to use this method. */
.readFromJsonString(processJsonMin);
/*
* Specifies the parameters used to create the automaton
*/
OutputModelParameters outParams = new OutputModelParameters();
outParams.fileToSaveDotFileForAutomaton = OUTPUT_DOT_FILE;
new MinerFulOutputManagementLauncher().manageOutput(proMod, outParams);
System.exit(0);
}
} | 2,029 | 44.111111 | 159 | java |
Janus | Janus-master/src/minerful/examples/api/io/FromDeclareMapToJSONandXMLandCSV.java | package minerful.examples.api.io;
import java.io.File;
import minerful.MinerFulOutputManagementLauncher;
import minerful.concept.ProcessModel;
import minerful.io.encdec.csv.CsvEncoder;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.io.params.OutputModelParameters;
import minerful.params.SystemCmdParameters;
import minerful.params.ViewCmdParameters;
/**
* This example class demonstrates how to use MINERful to convert an existing Declare map XML file into multiple formats.
* Here it is also shown how to limit the columns to be printed in the CSV.
* @author Claudio Di Ciccio ([email protected])
*/
public class FromDeclareMapToJSONandXMLandCSV {
public static void main(String[] args) {
OutputModelParameters outParams =
new OutputModelParameters();
ViewCmdParameters viewParams =
new ViewCmdParameters();
SystemCmdParameters systemParams =
new SystemCmdParameters();
/*
* There are two possible methods of DeclareEncoderDecoder to create a
* minerful.concept.ProcessModel out of a Declare Map:
* 1) public static ProcessModel fromDeclareMapToMinerfulProcessModel(String declareMapFilePath)
* 2) public static ProcessModel fromDeclareMapToMinerfulProcessModel(org.processmining.plugins.declareminer.visualizing.AssignmentModel declareMapModel) {
* The first one is used here, and reads an XML representation of the Declare map.
* The second one can be used to pass in-memory representations of the Declare map.
*/
ProcessModel proMod =
new DeclareMapEncoderDecoder(
"/home/claudio/Code/MINERful/models/mined/bpi_challenge_2013_closed_problems-model-s075-model-s075_CONDEC.xml"
).createMinerFulProcessModel();
outParams.fileToSaveAsXML = new File("/home/claudio/MINERful-declarative-model.xml");
outParams.fileToSaveAsJSON = new File("/home/claudio/MINERful-declarative-model.json");
outParams.fileToSaveConstraintsAsCSV = new File("/home/claudio/MINERful-declarative-model.csv");
outParams.csvColumnsToPrint = new CsvEncoder.PRINT_OUT_ELEMENT[]{
CsvEncoder.PRINT_OUT_ELEMENT.FULL_NAME, // ("Constraint"),
CsvEncoder.PRINT_OUT_ELEMENT.TEMPLATE_NAME, //("Template"),
CsvEncoder.PRINT_OUT_ELEMENT.ACTIVATION, //("Activation"),
CsvEncoder.PRINT_OUT_ELEMENT.TARGET, //("Target"),
/* The following are commented out, because default ConDec models do not bear support, confidence, and interest factor. */
// CsvEncoder.PRINT_OUT_ELEMENT.SUPPORT, //("Support"),
// CsvEncoder.PRINT_OUT_ELEMENT.CONFIDENCE_LEVEL, //("Confidence level"),
// CsvEncoder.PRINT_OUT_ELEMENT.INTEREST_FACTOR, //("Interest factor"),
};
MinerFulOutputManagementLauncher outputMgt = new MinerFulOutputManagementLauncher();
outputMgt.manageOutput(proMod, viewParams, outParams, systemParams);
}
} | 2,818 | 47.603448 | 157 | java |
Janus | Janus-master/src/minerful/examples/api/io/FromXESLogToStringsLog.java | package minerful.examples.api.io;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Iterator;
import minerful.MinerFulMinerLauncher;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.InputLogCmdParameters;
/**
* This example class demonstrates how to use MINERful to convert an existing XES log into a strings-based new log, and store it.
* @author Claudio Di Ciccio ([email protected])
*/
public class FromXESLogToStringsLog {
public static void main(String[] args) throws FileNotFoundException {
InputLogCmdParameters inputParams =
new InputLogCmdParameters();
MinerFulCmdParameters minerFulParams =
new MinerFulCmdParameters();
inputParams.inputLogFile = new File("/home/claudio/Code/MINERful/logs/BPIC2012/financial_log.xes.gz");
File outputStringLogFile = new File("/home/claudio/Code/MINERful/logs/BPIC2012/financial_log.txt");
PrintWriter outWriter = new PrintWriter(outputStringLogFile);
// Parser to read the event log. Please notice that LogParser is an interface,
// regardless of the specific file format (XES, string...).
// The static "deriveLogParserFromLogFile" method
// takes care of the assignment of the correct class instance to implement the interface.
LogParser logParser = MinerFulMinerLauncher.deriveLogParserFromLogFile(inputParams, minerFulParams);
// Print out the decoding map
System.out.println(logParser.getEventEncoderDecoder().printDecodingMap());
// This iterator reads the event log, trace by trace.
Iterator<LogTraceParser> traceParsersIterator = logParser.traceIterator();
String[] encodedLog = new String[logParser.length()];
// This class reads within each trace, event by event.
LogTraceParser auXTraPar = null;
int i = 0;
String encodedTrace = null;
while (traceParsersIterator.hasNext()) {
auXTraPar = traceParsersIterator.next();
auXTraPar.init();
encodedTrace = auXTraPar.encodeTrace();
encodedLog[i++] = encodedTrace;
outWriter.println(encodedTrace);
}
outWriter.flush();
outWriter.close();
System.out.println("Converted log saved in " + outputStringLogFile);
}
} | 2,280 | 36.393443 | 129 | java |
Janus | Janus-master/src/minerful/examples/api/logmaking/FromCharactersProcessModelToLog.java | package minerful.examples.api.logmaking;
import java.io.File;
import java.io.IOException;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.existence.End;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.relation.Precedence;
import minerful.io.encdec.TaskCharEncoderDecoder;
import minerful.logmaker.MinerFulLogMaker;
import minerful.logmaker.params.LogMakerParameters;
import minerful.logmaker.params.LogMakerParameters.Encoding;
import org.deckfour.xes.model.XLog;
/**
* This usage example class demonstrates how to generate XES logs starting from the definitions of constraints exerted on activities identified by single characters.
* @author Claudio Di Ciccio ([email protected])
*/
public class FromCharactersProcessModelToLog {
public static Integer minEventsPerTrace = 0;
public static Integer maxEventsPerTrace = 5;
public static Long tracesInLog = (long)50;
public static File outputLog = new File("/home/claudio/Desktop/Temp-MINERful/test-log-output/out.xes");
public static void main(String[] args) throws IOException {
//////////////////////////////////////////////////////////////////
//Creation of the process model...
//////////////////////////////////////////////////////////////////
// Create the tasks to be used to model the process
TaskChar
a = new TaskChar('a'),
b = new TaskChar('b'),
c = new TaskChar('c'),
d = new TaskChar('d'),
e = new TaskChar('e');
// Create the task factory (which automatically associates character IDs to tasks)
TaskCharArchive taChaAr = new TaskCharArchive(
a,b,c,d,e
);
// Initialise the manager class of the bag of constraints constituting the declarative process model.
// Notice that it requires the set of tasks as input, to know what the process alphabet is.
ConstraintsBag bag = new ConstraintsBag(taChaAr.getTaskChars());
// Add new constraints to the bag. The first one is a target-branched constraint:
// it has two tasks assigned to the first parameter, instead of one as usual!
bag.add(new Precedence(new TaskCharSet(a, b), new TaskCharSet(c)));
bag.add(new Init(a));
bag.add(new Participation(b));
bag.add(new End(e));
// Create the process model on the basis of the archive of tasks, and the constraints expressed thereupon
ProcessModel proMod = new ProcessModel(taChaAr, bag);
//////////////////////////////////////////////////////////////////
//Creation of the log...
//////////////////////////////////////////////////////////////////
// Initialise the parameters to create the log
LogMakerParameters logMakParameters =
new LogMakerParameters(
minEventsPerTrace, maxEventsPerTrace, tracesInLog);
// Instantiate the class to make event logs, based on the parameters defined above
MinerFulLogMaker logMak = new MinerFulLogMaker(logMakParameters);
// Create the event log
XLog log = logMak.createLog(proMod);
// Store the log
logMakParameters.outputEncoding = Encoding.xes;
logMakParameters.outputLogFile = outputLog;
logMak.storeLog();
}
} | 3,298 | 39.231707 | 165 | java |
Janus | Janus-master/src/minerful/examples/api/logmaking/FromDeclareMapToLog.java | package minerful.examples.api.logmaking;
import java.io.File;
import java.io.IOException;
import minerful.concept.ProcessModel;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.logmaker.MinerFulLogMaker;
import minerful.logmaker.params.LogMakerParameters;
import minerful.logmaker.params.LogMakerParameters.Encoding;
import org.deckfour.xes.model.XLog;
/**
* This usage example class demonstrates how to generate XES logs from an existing Declare map XML file.
* @author Claudio Di Ciccio ([email protected])
*/
public class FromDeclareMapToLog {
public static final Integer MIN_EVENTS_PER_TRACE = 5;
public static final Integer MAX_EVENTS_PER_TRACE = 45;
public static final Long TRACES_IN_LOG = (long)100;
public static final Encoding OUTPUT_ENCODING = Encoding.xes;
public static final File OUTPUT_LOG = new File("/home/claudio/Desktop/log-from-Declare-map.xes");
public static void main(String[] args) throws IOException {
/*
* There are two possible methods of DeclareEncoderDecoder to create a
* minerful.concept.ProcessModel out of a Declare Map:
* 1) public static ProcessModel fromDeclareMapToMinerfulProcessModel(String declareMapFilePath)
* 2) public static ProcessModel fromDeclareMapToMinerfulProcessModel(org.processmining.plugins.declareminer.visualizing.AssignmentModel declareMapModel) {
* The first one is used here, and reads an XML representation of the Declare map.
* The second one can be used to pass in-memory representations of the Declare map.
*/
ProcessModel proMod =
new DeclareMapEncoderDecoder(
"/home/claudio/model.xml"
).createMinerFulProcessModel();
/*
* Specifies the parameters used to create the log
*/
LogMakerParameters logMakParameters =
new LogMakerParameters(
MIN_EVENTS_PER_TRACE, MAX_EVENTS_PER_TRACE, TRACES_IN_LOG);
/*
* Creates the log.
*/
MinerFulLogMaker logMak = new MinerFulLogMaker(logMakParameters);
/*
* The log XLog is an in-memory representation of the log, which can be later serialized in XES or MXML formats.
*/
XLog log = logMak.createLog(proMod);
logMakParameters.outputEncoding = OUTPUT_ENCODING;
System.out.println(logMak.printEncodedLog());
logMakParameters.outputLogFile = OUTPUT_LOG;
logMak.storeLog();
}
} | 2,317 | 36.387097 | 157 | java |
Janus | Janus-master/src/minerful/examples/api/logmaking/FromJsonProcessModelToLog.java | package minerful.examples.api.logmaking;
import java.io.File;
import java.io.IOException;
import minerful.concept.ProcessModel;
import minerful.io.encdec.ProcessModelEncoderDecoder;
import minerful.logmaker.MinerFulLogMaker;
import minerful.logmaker.params.LogMakerParameters;
import minerful.logmaker.params.LogMakerParameters.Encoding;
import org.deckfour.xes.model.XLog;
/**
* This usage example class demonstrates how to generate XES logs starting with the definitions of constraints specified with JSON objects.
* @author Claudio Di Ciccio ([email protected])
*/
public class FromJsonProcessModelToLog {
public static final Integer MIN_EVENTS_PER_TRACE = 5;
public static final Integer MAX_EVENTS_PER_TRACE = 45;
public static final Long TRACES_IN_LOG = (long)50;
public static final Encoding OUTPUT_ENCODING = Encoding.xes;
public static final File OUTPUT_LOG = new File("/home/claudio/Desktop/log-from-JSON.xes");
public static void main(String[] args) throws IOException {
// This is a JSON string with the minimal definition of a process. It is not case sensitive, and allows for some extra spaces, dashes, etc. in the template names.
String processJsonMin =
"{constraints: ["
+ "{template: Succession, parameters: [[A],[B]]},"
+ "{template: resPOnse, parameters: [[B],[C]]},"
+ "{template: EnD, parameters: [[D]]},"
+ "{template: existence, parameters: [[D]]},"
+ "{template: \"not chain-succession\", parameters: [[A],[B,D]]}"
+ "] }";
// This is a JSON string with a process having the same constraints as before, but with one more, unconstrained task (E), specified in the "tasks" field.
String processJsonWithExtraTask =
"{constraints: ["
+ "{template: Succession, parameters: [[A],[B]]},"
+ "{template: resPOnse, parameters: [[B],[C]]},"
+ "{template: EnD, parameters: [[D]]},"
+ "{template: existence, parameters: [[D]]},"
+ "{template: \"not chain-succession\", parameters: [[A],[B,D]]}"
+ "],"
+"tasks: [A,B,C,D,E] }";
ProcessModel proMod =
new ProcessModelEncoderDecoder()
// /* Alternative 1: load from file. Uncomment the following line to use this method. */
// .readFromJsonFile(new File("/home/claudio/Code/MINERful/temp/BPIC2012-disco.json"));
// /* Alternative 2: load from a (minimal) string version of the JSON model. Uncomment the following line to use this method. */
.readFromJsonString(processJsonMin);
// /* Alternative 3: load from another string version of the JSON model. Uncomment the following line to use this method. */
// .readFromJsonString(processJsonWithExtraTask);
/*
* Specifies the parameters used to create the log
*/
LogMakerParameters logMakParameters =
new LogMakerParameters(
MIN_EVENTS_PER_TRACE, MAX_EVENTS_PER_TRACE, TRACES_IN_LOG);
/*
* Creates the log.
*/
MinerFulLogMaker logMak = new MinerFulLogMaker(logMakParameters);
/*
* The log XLog is an in-memory representation of the log, which can be later serialized in XES or MXML formats.
*/
XLog log = logMak.createLog(proMod);
logMakParameters.outputEncoding = OUTPUT_ENCODING;
//System.out.println(logMak.printEncodedLog());
logMakParameters.outputLogFile = OUTPUT_LOG;
logMak.storeLog();
}
} | 3,281 | 41.076923 | 167 | java |
Janus | Janus-master/src/minerful/examples/api/logmaking/FromStringsProcessModelToLog.java | package minerful.examples.api.logmaking;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharFactory;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.existence.*;
import minerful.concept.constraint.relation.*;
import minerful.io.encdec.TaskCharEncoderDecoder;
import minerful.logmaker.MinerFulLogMaker;
import minerful.logmaker.params.LogMakerParameters;
import minerful.logmaker.params.LogMakerParameters.Encoding;
import minerful.logparser.StringTaskClass;
import org.deckfour.xes.model.XLog;
/**
* This usage example class demonstrates how to generate XES logs from a declarative process model created on the fly.
* @author Claudio Di Ciccio ([email protected])
*/
public class FromStringsProcessModelToLog {
public static Integer minEventsPerTrace = 5;
public static Integer maxEventsPerTrace = 45;
public static Long tracesInLog = (long)50;
public static File outputLog = new File("/home/claudio/Desktop/test-log-output/out.xes");
public static void main(String[] args) throws IOException {
//////////////////////////////////////////////////////////////////
// Creation of the process model...
//////////////////////////////////////////////////////////////////
// Create the task factory (which automatically associates character IDs to tasks)
TaskCharFactory tChFactory = new TaskCharFactory();
// Create the tasks to be used to model the process
TaskChar a0 = tChFactory.makeTaskChar("A0");
TaskChar a0a1 = tChFactory.makeTaskChar("A0A1");
TaskChar b0b1b2b0 = tChFactory.makeTaskChar("B0B1B2_BO");
TaskChar b0b1b2b0b3 = tChFactory.makeTaskChar("B0B1B2_BOB1B2B3");
// Create the tasks archive to store the "process alphabet"
TaskCharArchive taChaAr = new TaskCharArchive(
a0, a0a1, b0b1b2b0, b0b1b2b0b3
);
// Initialise the manager class of the bag of constraints constituting the declarative process model.
// Notice that it requires the set of tasks as input, to know what the process alphabet is.
ConstraintsBag bag = new ConstraintsBag(taChaAr.getTaskChars());
// Add new constraints to the bag. The first one is a target-branched constraint:
// it has two tasks assigned to the first parameter, instead of one as usual!
bag.add(new AlternatePrecedence(new TaskCharSet(a0, a0a1), new TaskCharSet(b0b1b2b0)));
bag.add(new Participation(b0b1b2b0));
// Create the process model on the basis of the archive of tasks, and the constraints expressed thereupon
ProcessModel proMod = new ProcessModel(taChaAr, bag);
//////////////////////////////////////////////////////////////////
// Creation of the log...
//////////////////////////////////////////////////////////////////
// Initialise the parameters to create the log
LogMakerParameters logMakParameters =
new LogMakerParameters(
minEventsPerTrace, maxEventsPerTrace, tracesInLog);
// Instantiate the class to make event logs, based on the parameters defined above
MinerFulLogMaker logMak = new MinerFulLogMaker(logMakParameters);
// Create the event log
XLog log = logMak.createLog(proMod);
// Store the log
logMakParameters.outputEncoding = Encoding.xes;
logMakParameters.outputLogFile = outputLog;
logMak.storeLog();
}
} | 3,525 | 40 | 118 | java |
Janus | Janus-master/src/minerful/examples/api/simplification/MinerFulSimplificationInvokerOnDeclareMapFile.java | package minerful.examples.api.simplification;
import java.io.File;
import minerful.MinerFulOutputManagementLauncher;
import minerful.MinerFulSimplificationLauncher;
import minerful.concept.ProcessModel;
import minerful.index.comparator.modular.ConstraintSortingPolicy;
import minerful.io.params.InputModelParameters;
import minerful.io.params.InputModelParameters.InputEncoding;
import minerful.io.params.OutputModelParameters;
import minerful.params.SystemCmdParameters;
import minerful.params.ViewCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters.PostProcessingAnalysisType;
/**
* This example class demonstrates how to load a Declare Map file as a process model, then run the simplification engine of MINERful to remove the redundant constraints.
* @author Claudio Di Ciccio ([email protected])
*/
public class MinerFulSimplificationInvokerOnDeclareMapFile {
private static final String EXAMPLE_OUTPUT_PROCESS_MODEL_FILE = "/home/claudio/Desktop/example-model.xml";
private static final String EXAMPLE_INPUT_PROCESS_MODEL_FILE = "/home/claudio/Code/MINERful/models/mined/bpi_challenge_2013_closed_problems-model-s075.xml";
public static void main(String[] args) {
InputModelParameters inputParams = new InputModelParameters();
PostProcessingCmdParameters postParams = new PostProcessingCmdParameters();
ViewCmdParameters viewParams = new ViewCmdParameters();
OutputModelParameters outParams = new OutputModelParameters();
SystemCmdParameters systemParams = new SystemCmdParameters();
// Specifies the type of post-processing analysis, through which getting rid of redundancies or conflicts in the process model
postParams.postProcessingAnalysisType = PostProcessingAnalysisType.HIERARCHYCONFLICTREDUNDANCYDOUBLE;
// Policies according to which constraints are ranked in terms of significance. The position in the array reflects the order with which the policies are used. When a criterion does not establish which constraint in a pair should be put ahead in the ranking, the following one in the array is utilised.
postParams.sortingPolicies = new ConstraintSortingPolicy[]{
ConstraintSortingPolicy.ACTIVATIONTARGETBONDS,
ConstraintSortingPolicy.FAMILYHIERARCHY,
ConstraintSortingPolicy.SUPPORTCONFIDENCEINTERESTFACTOR
};
// Specifies the input file where the model is stored
inputParams.inputFile=new File(EXAMPLE_INPUT_PROCESS_MODEL_FILE);
// Specifies the encoding of the model stored in the input file: here a MINERful XML model; a Declare Map file can be read as well, by choosing the corresponding InputEncoding value.
inputParams.inputLanguage=InputEncoding.MINERFUL;
MinerFulSimplificationLauncher miFuSimpLa = new MinerFulSimplificationLauncher(inputParams, postParams, systemParams);
/*
* Should the process model be already in memory, it does not make much sense to write it into a file and then read that file. In such a case, please refer to the following constructors for MinerFulSimplificationLauncher:
* MinerFulSimplificationLauncher(AssignmentModel declareMapModel, PostProcessingCmdParameters postParams)
* MinerFulSimplificationLauncher(ProcessModel minerFulProcessModel, PostProcessingCmdParameters postParams)
*/
ProcessModel processModel = miFuSimpLa.simplify();
// To store the simplified process model file somewhere. Please mind that the process model can also be stored as a Declare map. See the specification of minerful.io.params.OutputModelParameters
outParams.fileToSaveAsXML = new File(EXAMPLE_OUTPUT_PROCESS_MODEL_FILE);
MinerFulOutputManagementLauncher outputMgt = new MinerFulOutputManagementLauncher();
outputMgt.manageOutput(processModel, viewParams, outParams, systemParams);
System.out.println("Simplified model: " + processModel);
System.exit(0);
}
} | 3,834 | 56.238806 | 304 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHasseBreadthFirstStepper.java | package minerful.index;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map.Entry;
import java.util.TreeSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
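/**
* Steps through a ConstraintIndexHasseDiagram level by level (breadth-first), either downwards from the
* single-task nodes or upwards from the sink nodes, depending on the given navigation direction.
*/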
public class ConstraintIndexHasseBreadthFirstStepper extends ConstraintIndexHasseManager {
public final NavDirection navDirection;
private Collection<ConstraintIndexHasseNode> nodesUnderAnalysis;
private boolean evenStep = false;
public ConstraintIndexHasseBreadthFirstStepper(ConstraintIndexHasseDiagram hasseDiagram, NavDirection navDirection) {
super(hasseDiagram);
this.navDirection = navDirection;
switch (this.navDirection) {
case DOWN:
this.nodesUnderAnalysis = new ArrayList<ConstraintIndexHasseNode>(this.hasseDiagram.root.children.size());
for (Entry<TaskChar, ConstraintIndexHasseNode> entry : this.hasseDiagram.root.children.entrySet())
this.nodesUnderAnalysis.add(entry.getValue());
break;
case UP:
this.nodesUnderAnalysis = hasseDiagram.getSinkNodes();
break;
}
}
public Collection<TaskCharSet> getCurrentTaskCharSetsInBreadthFirstVisit() {
TreeSet<TaskCharSet> tChSets = new TreeSet<TaskCharSet>();
for (ConstraintIndexHasseNode node : this.nodesUnderAnalysis) {
tChSets.add(node.indexedTaskCharSet);
}
return (!evenStep ? tChSets : tChSets.descendingSet());
}
public Collection<ConstraintIndexHasseNode> getCurrentNodesInBreadthFirstVisit() {
return this.nodesUnderAnalysis;
}
public boolean moveOneStepAhead() {
TreeSet<ConstraintIndexHasseNode> nextNodesUnderAnalysis = new TreeSet<ConstraintIndexHasseNode>();
switch (this.navDirection) {
case DOWN:
for (ConstraintIndexHasseNode node : this.nodesUnderAnalysis) {
nextNodesUnderAnalysis.addAll(node.children.values());
}
break;
case UP:
for (ConstraintIndexHasseNode node : this.nodesUnderAnalysis) {
nextNodesUnderAnalysis.addAll(node.getParentAndUncles()); // duplicates will be automatically removed!
}
break;
}
if (nextNodesUnderAnalysis.size() < 1)
return false;
this.nodesUnderAnalysis = nextNodesUnderAnalysis;
evenStep = !evenStep;
return true;
}
} | 2,219 | 29.410959 | 118 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHasseDiagram.java | package minerful.index;
import java.util.ArrayList;
import java.util.Collection;
import minerful.concept.TaskCharSet;
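/**
* Hasse diagram indexing task-char sets, rooted at the empty task-char set; sink nodes are registered separately.
*/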
public class ConstraintIndexHasseDiagram {
final ConstraintIndexHasseNode root;
private Collection<ConstraintIndexHasseNode> sinkNodes;
public ConstraintIndexHasseDiagram() {
this.root = new ConstraintIndexHasseNode(null, TaskCharSet.VOID_TASK_CHAR_SET);
this.sinkNodes = new ArrayList<ConstraintIndexHasseNode>();
}
public void addSink(ConstraintIndexHasseNode sink) {
this.sinkNodes.add(sink);
}
public Collection<ConstraintIndexHasseNode> getSinkNodes() {
return this.sinkNodes;
}
@Override
public String toString() {
StringBuilder sBuil = new StringBuilder();
sBuil.append("BranchedConstraintIndexTree\n");
// sBuil.append(this.root.toString());
sBuil.append(this.root.toPrefixedPathString());
return sBuil.toString();
}
} | 887 | 24.371429 | 81 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHasseInverseDepthFirstStepper.java | package minerful.index;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.Stack;
import minerful.concept.TaskCharSet;
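/**
* Steps through a ConstraintIndexHasseDiagram in an inverse depth-first fashion, using a stack of
* child-node iterators to expose one node (and its indexed task-char set) at a time.
*/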
public class ConstraintIndexHasseInverseDepthFirstStepper extends ConstraintIndexHasseManager {
private Stack<ListIterator<ConstraintIndexHasseNode>> stackOfIterators;
public ConstraintIndexHasseInverseDepthFirstStepper(ConstraintIndexHasseDiagram hasseDiagram) {
super(hasseDiagram);
this.stackOfIterators = new Stack<ListIterator<ConstraintIndexHasseNode>>();
this.resetCurrentPointers();
this.preSetUpStackOfIterators(this.hasseDiagram.root);
this.setUpStackOfIterators();
}
private void resetCurrentPointers() {
this.currentNode = this.hasseDiagram.root;
this.currentTaskCharSet = this.currentNode.indexedTaskCharSet;
}
private void setUpStackOfIterators() {
if (this.stackOfIterators.size() > 0)
this.updateCurrentPointers();
}
private void preSetUpStackOfIterators(ConstraintIndexHasseNode ancestor) {
while (ancestor.children.size() > 0) {
this.stackOfIterators.push(new ArrayList<ConstraintIndexHasseNode>(ancestor.children.values()).listIterator());
ancestor = ancestor.children.get(ancestor.children.firstKey());
}
}
private void updateCurrentPointers() {
this.currentNode = this.stackOfIterators.peek().next();
this.currentTaskCharSet = this.currentNode.indexedTaskCharSet;
}
public ConstraintIndexHasseNode getCurrentNode() {
return currentNode;
}
public TaskCharSet getCurrentTaskCharSet() {
return currentTaskCharSet;
}
public boolean moveOneStepAhead() {
if (!this.stackOfIterators.isEmpty()) {
if (!this.stackOfIterators.peek().hasNext()) {
this.stackOfIterators.pop();
if (this.stackOfIterators.isEmpty()) {
this.resetCurrentPointers();
return false;
} else {
this.updateCurrentPointers();
return true;
}
} else {
if (this.stackOfIterators.peek().hasPrevious()) { // is it a following sibling? Before visiting it, try and see whether there is a hierarchy below!
ListIterator<ConstraintIndexHasseNode> explorator = this.stackOfIterators.peek();
ConstraintIndexHasseNode newRoot = explorator.next();
this.preSetUpStackOfIterators(newRoot);
explorator.previous();
this.updateCurrentPointers();
return true;
} else {
this.updateCurrentPointers();
return true;
}
}
} else {
this.resetCurrentPointers();
return false;
}
}
public boolean isThereAnyNodeLeftToAnalyse() {
return !(this.currentNode == this.hasseDiagram.root);
}
} | 2,595 | 29.541176 | 151 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHasseMaker.java | package minerful.index;
import java.util.TreeMap;
import java.util.TreeSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSet;
import minerful.concept.TaskCharSetFactory;
import minerful.concept.constraint.Constraint;
import org.apache.commons.lang3.StringUtils;
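/**
* Builds a ConstraintIndexHasseDiagram containing the combinations of task chars up to a given size,
* optionally excluding one task char from the combinations.
*/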
public class ConstraintIndexHasseMaker extends ConstraintIndexHasseManager {
private TaskCharArchive taskCharArchive;
private TaskCharSetFactory taskCharSetFactory;
public ConstraintIndexHasseMaker(TaskCharArchive taskCharArchive,
int maxSizeOfCombos, TaskChar excludedTaskChar) {
this.hasseDiagram = new ConstraintIndexHasseDiagram();
this.currentNode = hasseDiagram.root;
this.taskCharArchive = taskCharArchive;
this.taskCharSetFactory = new TaskCharSetFactory(this.taskCharArchive);
this.currentTaskCharSet = TaskCharSet.VOID_TASK_CHAR_SET;
this.populateHasseDiagram(maxSizeOfCombos, excludedTaskChar);
}
public ConstraintIndexHasseMaker(TaskCharArchive taskCharArchive, int maxSizeOfCombos) {
this(taskCharArchive, maxSizeOfCombos, null);
}
private void populateHasseDiagram(int maxSizeOfCombos, TaskChar excludedTaskChar) {
this.currentNode = hasseDiagram.root;
this.currentTaskCharSet = TaskCharSet.VOID_TASK_CHAR_SET;
if (maxSizeOfCombos > this.taskCharArchive.size() - (excludedTaskChar == null ? 0 : 1))
maxSizeOfCombos = this.taskCharArchive.size() - (excludedTaskChar == null ? 0 : 1);
TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>
currentDepthNodes = new TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>(),
newRootNodes = null,
newGenerationNodes = new TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>();
TreeSet<TaskChar> taskChars = this.taskCharArchive.getCopyOfTaskChars();
if (excludedTaskChar != null)
taskChars.remove(excludedTaskChar);
TreeMap<String, ConstraintIndexHasseNode> historyForNodesGeneratedByTheSameTaskCharBranch = new TreeMap<String, ConstraintIndexHasseNode>();
ConstraintIndexHasseNode nuHasseNode = null;
// First level: single task characters
for (TaskChar tCh : taskChars) {
nuHasseNode = new ConstraintIndexHasseNode(this.currentNode, new TaskCharSet(tCh));
historyForNodesGeneratedByTheSameTaskCharBranch = new TreeMap<String, ConstraintIndexHasseNode>();
this.currentNode.children.put(tCh, nuHasseNode);
historyForNodesGeneratedByTheSameTaskCharBranch.put(String.valueOf(tCh.identifier), nuHasseNode);
currentDepthNodes.put(tCh, historyForNodesGeneratedByTheSameTaskCharBranch);
}
boolean newLevelIsNeeded = (--maxSizeOfCombos) > 0;
TreeSet<TaskChar> remainingTaskChars = taskChars;
// Second to N-th level, with N equal to the size of the alphabet.
TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>> temporaryNewGenerationNodes = null;
TreeMap<String, ConstraintIndexHasseNode> temporaryNewGenerationNodeHistories = null;
while(newLevelIsNeeded) {
newRootNodes = (TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>)currentDepthNodes.clone();
newGenerationNodes = new TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>();
for (TaskChar keyTaskChar : currentDepthNodes.keySet()) {
historyForNodesGeneratedByTheSameTaskCharBranch = currentDepthNodes.get(keyTaskChar);
remainingTaskChars = (TreeSet<TaskChar>)taskChars.tailSet(keyTaskChar, false);
for (String history : historyForNodesGeneratedByTheSameTaskCharBranch.keySet()) {
if (remainingTaskChars != null) {
temporaryNewGenerationNodes =
populateHasseOneLevelDeeper(
historyForNodesGeneratedByTheSameTaskCharBranch.get(history),
keyTaskChar,
history,
newRootNodes,
remainingTaskChars
);
for (TaskChar tempNuGenNodesKey : temporaryNewGenerationNodes.keySet()) {
temporaryNewGenerationNodeHistories = temporaryNewGenerationNodes.get(tempNuGenNodesKey);
if (newGenerationNodes.containsKey(tempNuGenNodesKey)) {
for (String historyForTempNuGenNodes : temporaryNewGenerationNodeHistories.keySet()) {
newGenerationNodes.get(tempNuGenNodesKey).put(historyForTempNuGenNodes, temporaryNewGenerationNodeHistories.get(historyForTempNuGenNodes));
}
} else {
newGenerationNodes.put(tempNuGenNodesKey, temporaryNewGenerationNodes.get(tempNuGenNodesKey));
}
}
}
}
}
newLevelIsNeeded = (--maxSizeOfCombos) > 0;
if (newLevelIsNeeded) {
currentDepthNodes = (TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>)newGenerationNodes.clone();
}
}
// // REMOVED: newGenerationNodes.size() > 1 when the branching factor is lower than the number of process activities
// if (newGenerationNodes.size() > 1) {
// throw new IllegalStateException("Multiple sink nodes in Hasse diagram");
// } else {
for (TaskChar sinkTaskChar : newGenerationNodes.keySet()) {
for (String sinkNodeHistory : newGenerationNodes.get(sinkTaskChar).keySet()) { // expected to be 1
this.hasseDiagram.addSink(newGenerationNodes.get(sinkTaskChar).get(sinkNodeHistory));
}
}
// }
}
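	/**
	 * Builds the next level of the diagram under the given root node by
	 * combining the root's indexed task-char set with each of the remaining
	 * task chars, registering as "uncles" the previous-generation nodes whose
	 * indexed sets are contained in the new node's set.
	 */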
private TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>> populateHasseOneLevelDeeper(
ConstraintIndexHasseNode root,
TaskChar rootTaskChar,
String historyForRoot,
TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>> olderGeneration,
TreeSet<TaskChar> taskChars) {
TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>> treep =
new TreeMap<TaskChar, TreeMap<String, ConstraintIndexHasseNode>>();
String nuNodeHistory = null;
TreeMap<String, ConstraintIndexHasseNode>
uncles = null,
nuSiblings = null;
ConstraintIndexHasseNode nuNode = null;
for (TaskChar tCh : taskChars) {
nuSiblings = new TreeMap<String, ConstraintIndexHasseNode>();
nuNode = new ConstraintIndexHasseNode(root, this.taskCharSetFactory.createSet(root.indexedTaskCharSet, tCh));
root.children.put(tCh, nuNode);
nuNodeHistory = historyForRoot + tCh.identifier;
uncles = olderGeneration.get(tCh);
for (String uncleHistory : uncles.keySet()) {
if (StringUtils.containsOnly(uncleHistory, nuNodeHistory)) {
nuNode.uncles.add(uncles.get(uncleHistory));
}
}
nuSiblings.put(nuNodeHistory, nuNode);
treep.put(tCh, nuSiblings);
}
return treep;
}
public ConstraintIndexHasseNode addConstraint(TaskCharSet referenceTaskChSet, Constraint c) {
currentNode = this.searchNodeForConstraint(referenceTaskChSet, c);
currentNode.addConstraint(c);
return currentNode;
}
	/**
	 * Optimised for depth-first search!
	 * @param referenceTaskChSet the task-char set indexing the node to look for
	 * @param c the constraint for which the indexing node is sought
	 * @return the node of the Hasse diagram indexed by the given task-char set
	 */
private ConstraintIndexHasseNode searchNodeForConstraint(TaskCharSet referenceTaskChSet, Constraint c) {
// Is the current node OK for inserting the constraint?
if (currentTaskCharSet.equals(referenceTaskChSet)) {
return this.currentNode;
} else {
// Is the current node a sibling or a deeper descendant of an ancestor?
if (currentTaskCharSet.size() >= referenceTaskChSet.size()) {
this.currentNode = this.currentNode.parent;
this.currentTaskCharSet = this.currentNode.indexedTaskCharSet;
return this.searchNodeForConstraint(referenceTaskChSet, c);
} else {
// The following is implicit: is the current node a parent or a less deep descendant of an ancestor?
// if (currentStringOfIdentiers.length() < refTaChSetId.length()) {
// Is this a direct ancestor?
if (this.currentTaskCharSet.isPrefixOf(referenceTaskChSet)) {
TaskChar parentToFindId = referenceTaskChSet.getTaskChar(this.currentTaskCharSet.size());
this.currentNode = this.currentNode.children.get(parentToFindId);
this.currentTaskCharSet = this.currentNode.indexedTaskCharSet;
return this.searchNodeForConstraint(referenceTaskChSet, c);
// ... or not? In case, you have to rise along the hierarchy and search for a common ancestor!
} else {
this.currentNode = this.currentNode.parent;
this.currentTaskCharSet = this.currentNode.indexedTaskCharSet;
return this.searchNodeForConstraint(referenceTaskChSet, c);
}
}
}
}
} | 8,170 | 42.695187 | 148 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHasseManager.java | package minerful.index;
import java.util.TreeMap;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
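/**
 * Base class for the objects operating on a {@link ConstraintIndexHasseDiagram}
 * (makers, steppers, pruners): it keeps track of the node and task-char set
 * currently under analysis and provides a breadth-first printing utility.
 */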
public abstract class ConstraintIndexHasseManager {
public static enum NavDirection {
DOWN,
UP
}
public ConstraintIndexHasseDiagram hasseDiagram;
protected ConstraintIndexHasseNode currentNode;
protected TaskCharSet currentTaskCharSet;
public ConstraintIndexHasseManager() {
super();
}
public ConstraintIndexHasseManager(ConstraintIndexHasseDiagram hasseDiagram) {
this.hasseDiagram = hasseDiagram;
}
@Override
public String toString() {
return "ConstraintIndexHasseManager [hasseDiagram=" + hasseDiagram
+ "]";
}
public String printInBreadthFirstVisit() {
TreeMap<ConstraintIndexHasseNode, String>
nuGenerationNodes = new TreeMap<ConstraintIndexHasseNode, String>();
StringBuilder sBuil = new StringBuilder();
int level = 1;
for (TaskChar childTCh : hasseDiagram.root.children.keySet()) {
nuGenerationNodes.put(hasseDiagram.root.children.get(childTCh), childTCh.toString());
sBuil.append(childTCh);
sBuil.append("\n");
}
sBuil.append("--------========\n End of level ");
sBuil.append(level);
sBuil.append("\n========--------\n");
sBuil.append(printInBreadthFirstVisit(nuGenerationNodes, ++level));
return sBuil.toString();
}
public String printInBreadthFirstVisit(TreeMap<ConstraintIndexHasseNode, String> currentGenerationNodes, int level) {
StringBuilder sBuil = new StringBuilder();
TreeMap<ConstraintIndexHasseNode, String>
nuGenerationNodes = new TreeMap<ConstraintIndexHasseNode, String>();
String nuHistory = null;
ConstraintIndexHasseNode nuChild = null;
for (ConstraintIndexHasseNode currentGenerationNode : currentGenerationNodes.keySet()) {
for (TaskChar childTCh : currentGenerationNode.children.keySet()) {
nuHistory = currentGenerationNodes.get(currentGenerationNode) + " " + childTCh;
nuChild = currentGenerationNode.children.get(childTCh);
nuGenerationNodes.put(nuChild, nuHistory);
sBuil.append(" " + nuHistory);
sBuil.append("\n child of \n");
sBuil.append(" " + currentGenerationNodes.get(currentGenerationNode));
if (nuChild.uncles.size() > 0) {
sBuil.append("\n"
+" and nephew of \n");
sBuil.append(" ");
for (ConstraintIndexHasseNode uncle : nuChild.uncles) {
sBuil.append(currentGenerationNodes.get(uncle));
sBuil.append(" , ");
}
}
sBuil.append('\n');
}
}
if (sBuil.length() > 0)
sBuil.append(printInBreadthFirstVisit(nuGenerationNodes, ++level));
return sBuil.toString();
}
} | 2,643 | 29.744186 | 118 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHasseNode.java | package minerful.index;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.UUID;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
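/**
 * A node of the constraint-indexing Hasse diagram: it stores the constraints
 * associated with the indexed {@link TaskCharSet} and keeps references to its
 * parent, to its "uncles" (previous-generation nodes whose indexed sets are
 * contained in this node's set), and to its children.
 */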
public class ConstraintIndexHasseNode implements Comparable<ConstraintIndexHasseNode> {
public HashMap<Class<? extends Constraint>, Constraint> constraints;
public final TaskCharSet indexedTaskCharSet;
public final ConstraintIndexHasseNode parent;
public final Collection<ConstraintIndexHasseNode> uncles;
public final SortedMap<TaskChar, ConstraintIndexHasseNode> children;
public final UUID identifier;
public ConstraintIndexHasseNode(
ConstraintIndexHasseNode parent,
TaskCharSet indexedTaskCharSet,
SortedMap<TaskChar, ConstraintIndexHasseNode> children) {
this.identifier = UUID.randomUUID();
this.constraints = new HashMap<Class<? extends Constraint>, Constraint>(
MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_RELATION_CONSTRAINT_TEMPLATES,
(float) 1.0);
this.uncles = new ArrayList<ConstraintIndexHasseNode>();
this.parent = parent;
this.children = children;
this.indexedTaskCharSet = indexedTaskCharSet;
}
@Override
public String toString() {
StringBuilder sBuil = new StringBuilder();
sBuil.append("\n");
sBuil.append("[ID=");
sBuil.append(this.identifier);
sBuil.append("]\n");
for (Class<? extends Constraint> key : this.constraints.keySet()) {
sBuil.append(this.constraints.get(key));
}
for (TaskChar traversingKey : this.children.keySet()) {
sBuil.append("\n");
sBuil.append(traversingKey);
sBuil.append(this.children.get(traversingKey).toString());
}
return sBuil.toString();
}
public Collection<ConstraintIndexHasseNode> getParentAndUncles() {
Collection<ConstraintIndexHasseNode> relatives = new ArrayList<ConstraintIndexHasseNode>(
this.uncles.size() +
(parent == null ? 0 : 1)
);
if (parent != null)
relatives.add(parent);
if (uncles != null && uncles.size() > 0)
relatives.addAll(uncles);
return relatives;
}
public String toPrefixedPathString() {
return this.toPrefixedPathString("");
}
public String toPrefixedPathString(String prefix) {
StringBuilder sBuil = new StringBuilder();
sBuil.append("\n");
for (Constraint constraint : this.constraints.values()) {
sBuil.append("Constraints: ");
// if (!constraint.redundant) {
sBuil.append(constraint);
sBuil.append(" => ");
sBuil.append(constraint.getSupport());
if (constraint.isRedundant())
sBuil.append(" (redundant)");
sBuil.append("\n");
// }
}
String prefixForRecursion = "";
for (TaskChar traversingKey : this.children.keySet()) {
prefixForRecursion = prefix + '.' + traversingKey;
sBuil.append("\n");
sBuil.append(prefixForRecursion);
sBuil.append(this.children.get(traversingKey).toPrefixedPathString(prefixForRecursion));
}
return sBuil.toString();
}
ConstraintIndexHasseNode(ConstraintIndexHasseNode parent, TaskCharSet taskCharSet) {
this(parent, taskCharSet, new TreeMap<TaskChar, ConstraintIndexHasseNode>());
}
public void addConstraint(Constraint c) {
this.constraints.put(c.getClass(), c);
}
@Override
public int compareTo(ConstraintIndexHasseNode o) {
return this.identifier.compareTo(o.identifier);
}
} | 3,449 | 30.081081 | 91 | java |
Janus | Janus-master/src/minerful/index/ConstraintIndexHassePruner.java | package minerful.index;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.ProcessModel;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import org.apache.log4j.Logger;
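/**
 * Traverses a {@link ConstraintIndexHasseDiagram} and marks as redundant the
 * branched relation constraints whose support is already accounted for by
 * constraints over sub- or super-sets of their branching parameters (see the
 * policy notes inside the labelling method); the surviving constraints can be
 * retrieved through {@link #nonRedundantConstraints()}.
 */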
public class ConstraintIndexHassePruner extends ConstraintIndexHasseManager {
private static Logger logger = Logger.getLogger(ProcessModel.class.getCanonicalName());
	private final boolean forOutBranching;
	public ConstraintIndexHassePruner(boolean isForOutBranching, ConstraintIndexHasseDiagram hasseDiagram) {
		super(hasseDiagram);
		this.forOutBranching = isForOutBranching;
}
public void prune() {
this.labelRedundancyWrtSetContainment();
}
public Set<? extends Constraint> nonRedundantConstraints() {
Set<Constraint> nonRedundantConstraints = new TreeSet<Constraint>();
for (ConstraintIndexHasseNode firstLevelChild : this.hasseDiagram.root.children.values()) {
nonRedundantConstraints(nonRedundantConstraints, firstLevelChild);
}
return nonRedundantConstraints;
}
private void nonRedundantConstraints(Set<Constraint> constraintsToUpdate, ConstraintIndexHasseNode nodeToExplore) {
for (Constraint c : nodeToExplore.constraints.values()) {
if (!c.isRedundant())
constraintsToUpdate.add(c);
}
for (ConstraintIndexHasseNode child : nodeToExplore.children.values()) {
nonRedundantConstraints(constraintsToUpdate, child);
}
return;
}
private void labelRedundancyWrtSetContainment() {
		if (this.forOutBranching) {
/*
* E.g.,
* support ( Response(a, {b, c}) ) <= support ( Response(a, {b, c, d}) )
*/
for (Class<? extends Constraint> conClass : MetaConstraintUtils.getAllDiscoverableForwardRelationConstraintTemplates()) {
for (ConstraintIndexHasseNode sinkChild : this.hasseDiagram.getSinkNodes()) {
this.labelRedundancyWrtSetContainment(
sinkChild,
conClass,
NavDirection.UP);
}
}
/*
* E.g.,
* support ( Precedence(a, {b, c}) ) >= support ( Precedence(a, {b, c, d}) )
*/
// for (Class<? extends Constraint> conClass : MetaConstraintUtils.getAllPossibleBackwardsRelationConstraintTemplates()) {
// for (ConstraintIndexHasseNode firstChild : this.hasseDiagram.root.children.values()) {
// this.labelRedundancyWrtSetContainment(
// firstChild,
// conClass,
// NavDirection.DOWN);
// }
// }
}
else {
/*
* E.g.,
* support ( Response({a, b}, d) ) >= support ( Response({a, b, c}, d}) )
*/
// for (Class<? extends Constraint> conClass : MetaConstraintUtils.getAllPossibleOnwardsRelationConstraintTemplates()) {
// for (ConstraintIndexHasseNode firstChild : this.hasseDiagram.root.children.values()) {
// this.labelRedundancyWrtSetContainment(
// firstChild,
// conClass,
// NavDirection.DOWN);
// }
// }
/*
* E.g.,
* support ( Precedence({a, b}, d) ) <= support ( Precedence({a, b, c}, d}) )
*/
for (Class<? extends Constraint> conClass : MetaConstraintUtils.getAllDiscoverableBackwardRelationConstraintTemplates()) {
for (ConstraintIndexHasseNode sinkChild : this.hasseDiagram.getSinkNodes()) {
this.labelRedundancyWrtSetContainment(
sinkChild,
conClass,
NavDirection.UP);
}
}
}
/*
* Negative relation constraints behave always the same:
* e.g.,
* support ( NotCoExistence(a, {b, c}) ) >= support ( NotCoExistence(a, {b, c, d}) )
* as well as
* support ( NotCoExistence({a, b}, d) ) >= support ( NotCoExistence({a, b, c}, d}) )
*/
// for (Class<? extends Constraint> conClass : MetaConstraintUtils.getAllPossibleNegativeRelationConstraintTemplates()) {
// for (ConstraintIndexHasseNode firstChild : this.hasseDiagram.root.children.values()) {
// this.labelRedundancyWrtSetContainment(
// firstChild,
// conClass,
// NavDirection.DOWN);
// }
// }
}
private void labelRedundancyWrtSetContainment(
ConstraintIndexHasseNode nodeUnderAnalysis,
Class<? extends Constraint> conClass,
NavDirection explorationDirection) {
/*
********************************
* Policy: maximize support &
* IncreasingAlongHierarchy: true
* (branching on target, i.e., forward-target & out-branching OR backward-target & in-branching)
* =>
* explorationDirection: UP,
* from: sink
********************************
* =>
* Start from sink.
* Search for parents and uncles.
* If this node is associated to a parent or an uncle sharing the same support (at most, the parent/uncle's is lower),
* label this as redundant and proceed with that parent/uncle.
* Otherwise, label that parent/uncle and all ancestors as redundant, then return.
********************************
********************************
* Policy: maximize support &
* IncreasingAlongHierarchy: false
* (branching on target, i.e., forward-target & out-branching OR backward-target & in-branching
* +
* negative relation constraints)
* =>
* explorationDirection: DOWN,
* from: root's children
********************************
* =>
* Start from root's children.
* Search for children nodes.
* For each of them, if their support is equal to all the uncles and parents (it cannot be higher),
* mark this and all uncles/parents as redundant, the proceed with that child.
* Otherwise, mark the child and all descendants as redundant, then return.
*/
Constraint currentConstraint = null;
currentConstraint = nodeUnderAnalysis.constraints.get(conClass);
if (currentConstraint == null)
return;
Constraint
parentOrUncleConstraint = null,
childConstraint = null;
switch (explorationDirection) {
case UP:
for (ConstraintIndexHasseNode parentOrUncle : nodeUnderAnalysis.getParentAndUncles()) {
if (!parentOrUncle.equals(this.hasseDiagram.root)) {
parentOrUncleConstraint = parentOrUncle.constraints.get(conClass);
if (currentConstraint.getSupport() > parentOrUncleConstraint.getSupport()) {
logger.trace(currentConstraint + " has a support, " + currentConstraint.getSupport() + ", which is higher than his parent/uncle " + parentOrUncleConstraint + "'s one, " + parentOrUncleConstraint.getSupport() + " -> labeling " + parentOrUncleConstraint + " and its ancestors as redundant");
if (!parentOrUncleConstraint.isRedundant()) {
parentOrUncleConstraint.setRedundant(true);
propagateRedundancyLabel(parentOrUncle, conClass, explorationDirection);
}
} else {
logger.trace(currentConstraint + " has a support, " + currentConstraint.getSupport() + ", which is equal to or lower than his parent/uncle " + parentOrUncleConstraint + "'s one, " + parentOrUncleConstraint.getSupport() + " -> labeling this as redundant");
currentConstraint.setRedundant(true);
if (!parentOrUncleConstraint.isRedundant()) {
labelRedundancyWrtSetContainment(
parentOrUncle,
conClass,
explorationDirection
);
}
}
}
}
return;
case DOWN:
for (ConstraintIndexHasseNode child : nodeUnderAnalysis.children.values()) {
for (ConstraintIndexHasseNode childParentOrUncle : child.getParentAndUncles()) {
childConstraint = child.constraints.get(conClass);
parentOrUncleConstraint = childParentOrUncle.constraints.get(conClass);
if (parentOrUncleConstraint.getSupport() > childConstraint.getSupport()) {
logger.trace(parentOrUncleConstraint + " has a support, " + parentOrUncleConstraint.getSupport() + ", which is higher than his child " + childConstraint + "'s one, " + childConstraint.getSupport() + " -> labeling " + childConstraint + " as redundant");
childConstraint.setRedundant(true);
}
}
if (childConstraint.isRedundant()) {
logger.trace("At least a parent/uncle of " + childConstraint + " has a higher support -> labeling " + childConstraint + "' descendants as redundant");
propagateRedundancyLabel(child, conClass, explorationDirection);
} else {
labelRedundancyWrtSetContainment(
child,
conClass,
explorationDirection
);
}
}
return;
default:
break;
}
}
private void propagateRedundancyLabel(
ConstraintIndexHasseNode nodeUnderAnalysis,
Class<? extends Constraint> conClass,
NavDirection explorationDirection) {
if (nodeUnderAnalysis.equals(this.hasseDiagram.root))
return;
switch (explorationDirection) {
case UP:
for (ConstraintIndexHasseNode parentOrUncle : nodeUnderAnalysis.getParentAndUncles()) {
if (!parentOrUncle.equals(this.hasseDiagram.root)) {
if (!parentOrUncle.constraints.get(conClass).isRedundant()) {
logger.trace("Labeling " + parentOrUncle.constraints.get(conClass) + ", parent/uncle of " + nodeUnderAnalysis.constraints.get(conClass) + ", as redundant");
parentOrUncle.constraints.get(conClass).setRedundant(true);
propagateRedundancyLabel(parentOrUncle, conClass, explorationDirection);
}
}
}
break;
case DOWN:
for (ConstraintIndexHasseNode child : nodeUnderAnalysis.children.values()) {
if (!child.constraints.get(conClass).isRedundant()) {
logger.trace("Labeling " + child.constraints.get(conClass) + ", child of " + nodeUnderAnalysis.constraints.get(conClass) + ", as redundant");
child.constraints.get(conClass).setRedundant(true);
propagateRedundancyLabel(child, conClass, explorationDirection);
}
}
break;
default:
break;
}
return;
}
} | 9,507 | 36.730159 | 295 | java |
Janus | Janus-master/src/minerful/index/LinearConstraintsIndexFactory.java | package minerful.index;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.index.comparator.allinone.HierarchyBasedComparator;
import minerful.index.comparator.allinone.InterestConfidenceBasedComparator;
import minerful.index.comparator.allinone.SupportBasedComparator;
import minerful.index.comparator.allinone.SupportConfidenceInterestFactorBasedComparator;
import minerful.index.comparator.allinone.SupportFamilyConfidenceInterestFactorHierarchyLevelBasedComparator;
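/**
 * Collection of static utilities to (re-)index the constraints of a
 * {@link ConstraintsBag} (by task char, by constraint template, by implying and
 * implied activity) and to return them as flat sets sorted by support, family,
 * confidence, interest factor, and hierarchy level.
 */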
public class LinearConstraintsIndexFactory {
public static ConstraintsBag createConstraintsBagCloneIndexedByTaskCharAndSupport(ConstraintsBag bag) {
ConstraintsBag bagCopy = (ConstraintsBag) bag.clone();
TreeSet<Constraint> reindexed = null;
for (TaskChar key : bagCopy.getTaskChars()) {
			reindexed = new TreeSet<Constraint>(new SupportBasedComparator());
			reindexed.addAll(bagCopy.getConstraintsOf(key));
			bagCopy.eraseConstraintsOf(key);
			// Put the constraints back into the cloned bag, iterating them in support-sorted order
			for (Constraint con : reindexed) {
				bagCopy.add(new TaskCharSet(key), con);
			}
}
return bagCopy;
}
public static ConstraintsBag indexByImpliedTaskChar(ConstraintsBag bag) {
ConstraintsBag bagCopy = new ConstraintsBag(bag.getTaskChars());
for (TaskChar key : bag.getTaskChars()) {
for (Constraint c : bag.getConstraintsOf(key)) {
if (c instanceof RelationConstraint) {
bagCopy.add(((RelationConstraint)c).getImplied(), c);
} else {
}
}
}
return bagCopy;
}
public static ConstraintsBag createConstraintsBagCloneIndexedByTaskCharAndInterest(ConstraintsBag bag) {
ConstraintsBag bagCopy = (ConstraintsBag) bag.clone();
TreeSet<Constraint> reindexed = null;
for (TaskChar key : bagCopy.getTaskChars()) {
			reindexed = new TreeSet<Constraint>(new InterestConfidenceBasedComparator());
			reindexed.addAll(bagCopy.getConstraintsOf(key));
			bagCopy.eraseConstraintsOf(key);
			// Put the constraints back into the cloned bag, iterating them in interest-factor-sorted order
			for (Constraint con : reindexed) {
				bagCopy.add(new TaskCharSet(key), con);
			}
}
return bagCopy;
}
	/**
	 * The second coolest method I coded, ever.
	 * @param bag the bag of constraints to index
	 * @return a map of the constraints in the bag, indexed by task char, then by constraint template, with constraints sorted by support
	 */
public static Map<TaskChar, Map<Class<? extends Constraint>, SortedSet<Constraint>>> indexByTaskCharConstraintTypeAndSupport(ConstraintsBag bag) {
Map<TaskChar, Map<Class<? extends Constraint>, SortedSet<Constraint>>> index =
new HashMap<TaskChar,
Map<Class<? extends Constraint>, SortedSet<Constraint>>>(bag.getTaskChars().size());
for (TaskChar taskChar : bag.getTaskChars()) {
index.put(taskChar,
indexByConstraintTypeAndSupport(
bag.getConstraintsOf(taskChar),
taskChar
)
);
}
return index;
}
public static Map<Class<? extends Constraint>, SortedSet<Constraint>> indexByConstraintTypeAndSupport(Set<? extends Constraint> discoveredConstraints, TaskChar taskChar) {
Collection<Class<? extends Constraint>> possibleConstraints = MetaConstraintUtils.ALL_DISCOVERABLE_CONSTRAINT_TEMPLATES;
Map<Class<? extends Constraint>, SortedSet<Constraint>> localIndex = new HashMap<Class<? extends Constraint>, SortedSet<Constraint>>(possibleConstraints.size());
for (Class<? extends Constraint> possibleConstraint : possibleConstraints) {
localIndex.put(possibleConstraint, new TreeSet<Constraint>(new SupportBasedComparator()));
}
for (Constraint constraint : discoveredConstraints) {
localIndex.get(constraint.getClass()).add(constraint);
}
return localIndex;
}
public static Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> indexByImplyingAndImplied(ConstraintsBag bag) {
return indexByImplyingAndImplied(bag, false);
}
public static Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> indexByImplyingAndImplied(ConstraintsBag bag, boolean onlyUnmarked) {
Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> map = new TreeMap<TaskChar, Map<TaskChar,NavigableSet<Constraint>>>();
Map<TaskChar, NavigableSet<Constraint>> subMap = null;
TaskCharSet impliedSet = null;
for (TaskChar tCh : bag.getTaskChars()) {
subMap = new TreeMap<TaskChar, NavigableSet<Constraint>>();
for (Constraint con : bag.getConstraintsOf(tCh)) {
if (!onlyUnmarked || !con.isMarkedForExclusion()) {
impliedSet = (
(con.getImplied() == null)
? new TaskCharSet(tCh)
: con.getImplied()
);
for (TaskChar implied : impliedSet.getTaskCharsArray()) {
if (!subMap.containsKey(implied)) {
subMap.put(implied, new TreeSet<Constraint>());
}
subMap.get(implied).add(con);
}
}
}
map.put(tCh, subMap);
}
return map;
}
public static Collection<Constraint> getAllUnmarkedConstraintsSortedByBoundsSupportFamilyConfidenceInterestFactorHierarchyLevel(
ConstraintsBag bag) {
Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> mapOfConstraintsIndexedByImplyingAndImplied =
LinearConstraintsIndexFactory.indexByImplyingAndImplied(bag, true);
return getAllConstraintsSortedByBoundsSupportFamilyConfidenceInterestFactorHierarchyLevel(mapOfConstraintsIndexedByImplyingAndImplied);
}
public static Collection<Constraint> getAllConstraintsSortedByBoundsSupportFamilyConfidenceInterestFactorHierarchyLevel(ConstraintsBag bag) {
Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> mapOfConstraintsIndexedByImplyingAndImplied =
LinearConstraintsIndexFactory.indexByImplyingAndImplied(bag, false);
return getAllConstraintsSortedByBoundsSupportFamilyConfidenceInterestFactorHierarchyLevel(mapOfConstraintsIndexedByImplyingAndImplied);
}
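	/**
	 * Returns the constraints of the given index, visiting first the activities
	 * with the highest number of constraint-based connections; for every pair
	 * of connected activities, the constraints in both directions are appended
	 * sorted by support, family, confidence, interest factor, and hierarchy
	 * level.
	 */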
private static Collection<Constraint> getAllConstraintsSortedByBoundsSupportFamilyConfidenceInterestFactorHierarchyLevel(Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> mapOfConstraintsIndexedByImplyingAndImplied) {
List<TaskChar> taskCharsSortedByNumberOfConnections =
getTaskCharsSortedByNumberOfConnections(createMapOfConnections(mapOfConstraintsIndexedByImplyingAndImplied));
Collection<Constraint> constraints = new ArrayList<Constraint>();
Map<TaskChar, NavigableSet<Constraint>>
subMap = null,
subMapReverse = null;
Set<TaskChar>
taskCharsReverse = new TreeSet<TaskChar>(mapOfConstraintsIndexedByImplyingAndImplied.keySet());
SortedSet<Constraint> tmpReorderingSet = null;
// Starting from the activity having the highest number of constraints-based connections with other activities...
for (TaskChar tCh : taskCharsSortedByNumberOfConnections) {
// Get all constraints pertaining to tCh, indexed by the implied (target) activity
subMap = mapOfConstraintsIndexedByImplyingAndImplied.get(tCh);
// For every target activity
for (TaskChar tChRev : taskCharsReverse) {
if (subMap.containsKey(tChRev) && subMap.get(tChRev) != null && subMap.get(tChRev).size() > 0) {
tmpReorderingSet = new TreeSet<Constraint>(new SupportFamilyConfidenceInterestFactorHierarchyLevelBasedComparator());
tmpReorderingSet.addAll(subMap.get(tChRev));
constraints.addAll(tmpReorderingSet);
subMap.put(tChRev, null);
}
if (mapOfConstraintsIndexedByImplyingAndImplied.containsKey(tChRev)) {
subMapReverse = mapOfConstraintsIndexedByImplyingAndImplied.get(tChRev);
if (subMapReverse.containsKey(tCh) && subMapReverse.get(tCh) != null && subMapReverse.get(tCh).size() > 0) {
tmpReorderingSet = new TreeSet<Constraint>(new SupportFamilyConfidenceInterestFactorHierarchyLevelBasedComparator());
tmpReorderingSet.addAll(subMapReverse.get(tCh));
constraints.addAll(tmpReorderingSet);
subMapReverse.put(tCh, null);
}
}
}
}
return constraints;
}
// public static Map<TaskChar, Set<TaskChar>> createMapOfConnections(ConstraintsBag bag) {
// Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> map =
// LinearConstraintsIndexFactory.indexByImplyingAndImplied(bag);
//
// return createMapOfConnections(map);
// }
public static List<TaskChar> getTaskCharsSortedByNumberOfConnections(Map<TaskChar, Set<TaskChar>> map) {
TreeMap<Integer, Set<TaskChar>> orderingMap = new TreeMap<Integer, Set<TaskChar>>();
ArrayList<TaskChar> orderedTaskChars = new ArrayList<TaskChar>(map.keySet().size());
Integer howManyCorrelatedTasks = 0;
for (TaskChar tChr : map.keySet()) {
howManyCorrelatedTasks = map.get(tChr).size();
if (!orderingMap.containsKey(howManyCorrelatedTasks)) {
orderingMap.put(howManyCorrelatedTasks, new TreeSet<TaskChar>());
}
orderingMap.get(howManyCorrelatedTasks).add(tChr);
}
for (Integer key : orderingMap.descendingKeySet()) {
orderedTaskChars.addAll(orderingMap.get(key));
}
return orderedTaskChars;
}
public static Map<TaskChar, Set<TaskChar>> createMapOfConnections(
Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> map) {
Map<TaskChar, Set<TaskChar>> mapOfConnections =
new TreeMap<TaskChar, Set<TaskChar>>();
for (TaskChar tChr : map.keySet()) {
mapOfConnections.put(tChr, map.get(tChr).keySet());
}
return mapOfConnections;
}
public static SortedSet<Constraint> getAllConstraints(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>();
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
allConstraints.add(con);
}
}
return allConstraints;
}
public static SortedSet<Constraint> getAllUnmarkedConstraints(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>();
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
if (!con.isMarkedForExclusion()) {
allConstraints.add(con);
}
}
}
return allConstraints;
}
public static SortedSet<Constraint> getAllConstraintsSortedBySupport(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>(new SupportBasedComparator());
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
allConstraints.add(con);
}
}
return allConstraints;
}
public static SortedSet<Constraint> getAllConstraintsSortedBySupportConfidenceInterestFactor(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>(new SupportConfidenceInterestFactorBasedComparator());
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
allConstraints.add(con);
}
}
return allConstraints;
}
public static SortedSet<Constraint> getAllConstraintsSortedBySupportFamilyConfidenceInterestFactorHierarchyLevel(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>(new SupportFamilyConfidenceInterestFactorHierarchyLevelBasedComparator());
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
allConstraints.add(con);
}
}
return allConstraints;
}
public static SortedSet<Constraint> getAllConstraintsSortedByInterest(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>(new InterestConfidenceBasedComparator());
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
allConstraints.add(con);
}
}
return allConstraints;
}
public static SortedSet<Constraint> getAllConstraintsSortedByStrictness(ConstraintsBag bag) {
SortedSet<Constraint> allConstraints = new TreeSet<Constraint>(new HierarchyBasedComparator());
for (TaskChar tChr : bag.getTaskChars()) {
for (Constraint con : bag.getConstraintsOf(tChr)) {
allConstraints.add(con);
}
}
return allConstraints;
}
} | 11,930 | 40.141379 | 223 | java |
Janus | Janus-master/src/minerful/index/ModularConstraintsSorter.java | package minerful.index;
import java.util.Collection;
import java.util.Comparator;
import java.util.SortedSet;
import java.util.TreeSet;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ConstraintSortingPolicy;
import minerful.index.comparator.modular.ModularConstraintsComparatorFactory;
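/**
 * Sorts a collection of constraints according to a chain of
 * {@link ConstraintSortingPolicy} criteria, delegating the construction of the
 * compound comparator to a {@link ModularConstraintsComparatorFactory}.
 * <p>
 * A minimal usage sketch (assuming a collection of discovered constraints named
 * <code>constraints</code> is available to the caller):
 * <pre>
 * ModularConstraintsSorter sorter = new ModularConstraintsSorter(constraints);
 * SortedSet&lt;Constraint&gt; sorted = sorter.sort(
 *     ConstraintSortingPolicy.FAMILYHIERARCHY,
 *     ConstraintSortingPolicy.SUPPORTCONFIDENCEINTERESTFACTOR);
 * </pre>
 */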
public class ModularConstraintsSorter {
public static boolean DEFAULT_LAST_LEVEL_COMPARATOR_IS_RANDOM = false;
private Collection<Constraint> constraints;
private ModularConstraintsComparatorFactory factory;
public ModularConstraintsSorter() {
}
public ModularConstraintsSorter(Collection<Constraint> constraints) {
this.setConstraints(constraints);
}
public void setConstraints(Collection<Constraint> constraints) {
this.constraints = constraints;
this.factory = new ModularConstraintsComparatorFactory(this.constraints);
}
public SortedSet<Constraint> sort(ConstraintSortingPolicy... policies) {
return this.sort(DEFAULT_LAST_LEVEL_COMPARATOR_IS_RANDOM, policies);
}
public SortedSet<Constraint> sort(boolean lastLevelComparatorIsRandom, ConstraintSortingPolicy... policies) {
if (this.constraints == null) {
throw new IllegalStateException("Constraints not already set for sorting");
}
SortedSet<Constraint> sortedConstraints = null;
Comparator<? super Constraint> cnsCompa = factory.createModularComparator(lastLevelComparatorIsRandom, policies);
sortedConstraints = new TreeSet<Constraint>(cnsCompa);
sortedConstraints.addAll(this.constraints);
return sortedConstraints;
}
public Comparator<? super Constraint> getComparator(ConstraintSortingPolicy... policies) {
		return this.getComparator(DEFAULT_LAST_LEVEL_COMPARATOR_IS_RANDOM, policies);
}
public Comparator<? super Constraint> getComparator(boolean lastLevelComparatorIsRandom, ConstraintSortingPolicy... policies) {
return factory.createModularComparator(lastLevelComparatorIsRandom, policies);
}
} | 1,939 | 34.925926 | 128 | java |
Janus | Janus-master/src/minerful/index/SortingPolicy.java | package minerful.index;
public interface SortingPolicy {
// Just a declarative interface, to be implemented by Enums
} | 120 | 23.2 | 60 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/CnsSortAllInOneDefaultPolicy.java | package minerful.index.comparator.allinone;
import minerful.index.SortingPolicy;
public enum CnsSortAllInOneDefaultPolicy implements SortingPolicy {
SUPPORT_FAMILY_CONFIDENCE_INTEREST_FACTOR_HIERARCHY_LEVEL,
BOUNDS_SUPPORT_FAMILY_CONFIDENCE_INTEREST_FACTOR_HIERARCHY_LEVEL
} | 278 | 33.875 | 67 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/HierarchyBasedComparator.java | package minerful.index.comparator.allinone;
import java.util.Comparator;
import minerful.concept.constraint.Constraint;
public class HierarchyBasedComparator implements Comparator<Constraint> {
@Override
public int compare(Constraint o1, Constraint o2) {
		int result = Integer.valueOf(o1.getHierarchyLevel()).compareTo(Integer.valueOf(o2.getHierarchyLevel()));
return (
(result == 0)
? o1.compareTo(o2)
: result * (-1)
);
}
} | 452 | 25.647059 | 106 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/InterestConfidenceBasedComparator.java | package minerful.index.comparator.allinone;
import java.util.Comparator;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.relation.RelationConstraint;
public class InterestConfidenceBasedComparator implements Comparator<Constraint> {
@Override
public int compare(Constraint o1, Constraint o2) {
Double interestOfO1 = o1.getInterestFactor(),
interestOfO2 = o2.getInterestFactor();
int result = 0;
result = interestOfO1.compareTo(interestOfO2);
if (result == 0) {
interestOfO1 = o1.getConfidence();
interestOfO2 = o2.getConfidence();
result = interestOfO1.compareTo(interestOfO2);
}
return (
(result == 0)
? o1.compareTo(o2)
: result * (-1)
);
}
} | 731 | 24.241379 | 82 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/SupportBasedComparator.java | package minerful.index.comparator.allinone;
import java.util.Comparator;
import minerful.concept.constraint.Constraint;
public class SupportBasedComparator implements Comparator<Constraint> {
@Override
public int compare(Constraint o1, Constraint o2) {
int result = Double.valueOf(o1.getSupport()).compareTo(Double.valueOf(o2.getSupport()));
return (
(result == 0)
? o1.compareTo(o2)
: result * (-1)
);
}
} | 427 | 24.176471 | 90 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/SupportConfidenceInterestFactorBasedComparator.java | package minerful.index.comparator.allinone;
import java.util.Comparator;
import minerful.concept.constraint.Constraint;
public class SupportConfidenceInterestFactorBasedComparator implements Comparator<Constraint> {
@Override
public int compare(Constraint o1, Constraint o2) {
int result = Double.valueOf(o1.getSupport()).compareTo(o2.getSupport());
if (result == 0) {
result = Double.valueOf(o1.getConfidence()).compareTo(o2.getConfidence());
if (result == 0) {
result = Double.valueOf(o1.getInterestFactor()).compareTo(o2.getInterestFactor());
if (result == 0) {
result = o1.compareTo(o2);
}
}
}
return result * (-1);
}
} | 663 | 29.181818 | 95 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/SupportFamilyConfidenceInterestFactorHierarchyLevelBasedComparator.java | package minerful.index.comparator.allinone;
import java.util.Comparator;
import minerful.concept.constraint.Constraint;
public class SupportFamilyConfidenceInterestFactorHierarchyLevelBasedComparator implements Comparator<Constraint> {
@Override
public int compare(Constraint o1, Constraint o2) {
int result = Double.valueOf(o1.getSupport()).compareTo(o2.getSupport());
if (result == 0) {
result = o1.getFamily().compareTo(o2.getFamily()) * (-1);
if (result == 0) {
result = Double.valueOf(o1.getConfidence()).compareTo(o2.getConfidence());
if (result == 0) {
result = Double.valueOf(o1.getInterestFactor()).compareTo(o2.getInterestFactor());
if (result == 0) {
result = Integer.valueOf(o1.getHierarchyLevel()).compareTo(Integer.valueOf(o2.getHierarchyLevel()));
if (result == 0) {
result = o1.compareTo(o2);
}
}
}
}
}
return result * (-1);
}
} | 919 | 31.857143 | 115 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/TemplateAndParametersBasedComparator.java | package minerful.index.comparator.allinone;
import minerful.concept.constraint.Constraint;
public class TemplateAndParametersBasedComparator extends TemplateBasedComparator {
@Override
public int compare(Constraint o1, Constraint o2) {
int result = super.compare(o1, o2);
if (result == 0) {
result = o1.compareTo(o2);
}
return result;
}
} | 354 | 24.357143 | 83 | java |
Janus | Janus-master/src/minerful/index/comparator/allinone/TemplateBasedComparator.java | package minerful.index.comparator.allinone;
import java.util.Comparator;
import minerful.concept.constraint.Constraint;
public class TemplateBasedComparator implements Comparator<Constraint> {
@Override
public int compare(Constraint o1, Constraint o2) {
return o1.getTemplateName().compareTo(o2.getTemplateName());
}
} | 326 | 26.25 | 72 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/ConstraintSortingPolicy.java | package minerful.index.comparator.modular;
import minerful.index.SortingPolicy;
/**
* Specifies the order in which constraints are sorted when it comes to scan them one by one.
* @author Claudio Di Ciccio
*/
public enum ConstraintSortingPolicy implements SortingPolicy {
/** Support, confidence level, and interest factor, in descending order */
SUPPORTCONFIDENCEINTERESTFACTOR,
/** Family (existence constraints first, then relation constraints, ...), and then position in the subsumption hierarchy (e.g., ChainPrecedence first, then AlternatePrecedence) */
FAMILYHIERARCHY,
/** Descending number of connected tasks by means of relation constraints */
ACTIVATIONTARGETBONDS,
/** Default, i.e., based on the compareTo() method of the constraints under analysis */
DEFAULT,
/** Random sorting */
RANDOM
} | 818 | 39.95 | 180 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/ModularConstraintsComparator.java | package minerful.index.comparator.modular;
import java.util.Comparator;
import org.apache.log4j.Logger;
import minerful.concept.constraint.Constraint;
public abstract class ModularConstraintsComparator implements Comparator<Constraint> {
protected static Logger logger = Logger.getLogger(ModularConstraintsComparator.class);
private ModularConstraintsComparator secondLevelComparator;
public ModularConstraintsComparator(ModularConstraintsComparator secondLevelComparator) {
this.secondLevelComparator = secondLevelComparator;
}
public ModularConstraintsComparator() {
this.secondLevelComparator = null;
}
/*
* Does basically nothing but invoking the second-level comparator, if any
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/
@Override
public int compare(Constraint o1, Constraint o2) {
if (this.secondLevelComparator != null)
return this.secondLevelComparator.compare(o1, o2);
return o1.compareTo(o2);
}
} | 974 | 29.46875 | 90 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/ModularConstraintsComparatorFactory.java | package minerful.index.comparator.modular;
import java.util.Collection;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.modules.ModActivationTargetBondsBasedComparator;
import minerful.index.comparator.modular.modules.ModConfidenceBasedComparator;
import minerful.index.comparator.modular.modules.ModDefaultComparator;
import minerful.index.comparator.modular.modules.ModFamilyBasedComparator;
import minerful.index.comparator.modular.modules.ModHierarchyBasedComparator;
import minerful.index.comparator.modular.modules.ModInterestFactorBasedComparator;
import minerful.index.comparator.modular.modules.ModRandomComparator;
import minerful.index.comparator.modular.modules.ModSupportBasedComparator;
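/**
 * Creates compound {@link ModularConstraintsComparator} chains out of a
 * sequence of {@link ConstraintSortingPolicy} entries: the first policy in the
 * sequence discriminates at the coarsest level and ties are broken by the
 * following ones, optionally ending with a random or default comparator.
 */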
public class ModularConstraintsComparatorFactory {
private Collection<Constraint> constraints;
public ModularConstraintsComparatorFactory(Collection<Constraint> constraints) {
this.constraints = constraints;
}
public ModularConstraintsComparator createModularComparator(ConstraintSortingPolicy... types) {
return createModularComparator(true, types);
}
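	/**
	 * Builds the comparator chain from the finest-grained level upwards: the
	 * innermost comparator is a random one, the one for the last listed policy,
	 * or a default comparator, depending on the arguments; every preceding
	 * policy wraps the result, so that <code>types[0]</code> ends up
	 * discriminating first.
	 */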
public ModularConstraintsComparator createModularComparator(boolean lastLevelComparatorIsRandom, ConstraintSortingPolicy... types) {
// Starting from the last one (i.e., the one which discriminates at the finest level of granularity
int i = types.length - 1;
ModularConstraintsComparator
subCompa = (
lastLevelComparatorIsRandom
? new ModRandomComparator(this.constraints)
: ( types.length > 1
? createModularComparator(types[i--])
: new ModDefaultComparator()
)
),
compa = subCompa;
for (; i >= 0; i--) {
compa = createModularComparator(subCompa, types[i]);
subCompa = compa;
}
return compa;
}
private ModularConstraintsComparator createModularComparator(ConstraintSortingPolicy type) {
return createModularComparator(null, type);
}
private ModularConstraintsComparator createModularComparator(ModularConstraintsComparator nextLevelComparator, ConstraintSortingPolicy type) {
ModularConstraintsComparator
compa = null,
subCompa = null;
switch(type) {
case ACTIVATIONTARGETBONDS:
compa = new ModActivationTargetBondsBasedComparator(nextLevelComparator, this.constraints);
break;
case FAMILYHIERARCHY:
subCompa = new ModHierarchyBasedComparator(nextLevelComparator);
compa = new ModFamilyBasedComparator(subCompa);
break;
case SUPPORTCONFIDENCEINTERESTFACTOR:
subCompa = new ModInterestFactorBasedComparator(nextLevelComparator);
compa = new ModConfidenceBasedComparator(subCompa);
subCompa = compa;
compa = new ModSupportBasedComparator(subCompa);
break;
case RANDOM:
compa = new ModRandomComparator(nextLevelComparator, constraints);
break;
case DEFAULT:
compa = new ModDefaultComparator(nextLevelComparator);
break;
default:
throw new UnsupportedOperationException("Modular comparator " + type + " is not yet implemented.");
}
return compa;
}
} | 3,013 | 35.756098 | 143 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModActivationTargetBondsBasedComparator.java | package minerful.index.comparator.modular.modules;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily;
import minerful.concept.constraint.ConstraintFamily.ConstraintImplicationVerse;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
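/**
 * Compares constraints by the number of tasks that their activation tasks are
 * bound to through the given constraints: constraints whose activations are
 * connected to more tasks come first (for mutual relation constraints, both
 * parameters count as activations); ties are resolved by the second-level
 * comparator.
 */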
public class ModActivationTargetBondsBasedComparator extends ModularConstraintsComparator {
private static class ActivationTargetBondsBasedComparator implements Comparator<TaskChar> {
private Map<TaskChar, Set<TaskChar>> relatedActivitiesPerActivity;
public ActivationTargetBondsBasedComparator(
SortedMap<TaskChar, Set<TaskChar>> relatedActivitiesPerActivity) {
this.relatedActivitiesPerActivity = relatedActivitiesPerActivity;
}
@Override
public int compare(TaskChar o1, TaskChar o2) {
int result = o1.compareTo(o2);
if (result != 0) {
int triggerComparison = 0;
if (relatedActivitiesPerActivity.containsKey(o1) && relatedActivitiesPerActivity.containsKey(o2)) {
					triggerComparison = Integer.valueOf(relatedActivitiesPerActivity.get(o1).size()).compareTo(Integer.valueOf(relatedActivitiesPerActivity.get(o2).size()));
triggerComparison *= -1;
}
if (triggerComparison != 0) {
result = triggerComparison;
}
}
return result;
}
}
private SortedMap<TaskChar, Set<TaskChar>> relatedActivitiesPerActivity;
private HashMap<TaskChar, Integer> activationTargetBondsIndex;
// private Map<TaskChar, Integer> activityIndexByChainedTargeting;
public ModActivationTargetBondsBasedComparator(Collection<Constraint> constraints) {
super();
this.computeOrderingFunction(constraints);
}
public ModActivationTargetBondsBasedComparator(ModularConstraintsComparator secondLevelComparator, Collection<Constraint> constraints) {
super(secondLevelComparator);
this.computeOrderingFunction(constraints);
}
public void computeOrderingFunction(Collection<Constraint> constraints) {
this.countRelatedActivitiesPerActivity(constraints);
this.createIndex();
}
private void createIndex() {
ActivationTargetBondsBasedComparator tasksComparator = new ActivationTargetBondsBasedComparator(relatedActivitiesPerActivity);
// Sort the taskChars by the number of links to other activities through activated constraints
SortedSet<TaskChar> tasksSortedByTargetedConstraintRelDegree = new TreeSet<TaskChar>(tasksComparator);
tasksSortedByTargetedConstraintRelDegree.addAll(relatedActivitiesPerActivity.keySet());
// Build an indexing hash-map for all tasks <task, index>
this.activationTargetBondsIndex = new HashMap<TaskChar, Integer>(tasksSortedByTargetedConstraintRelDegree.size(), (float) 1.0);
int i = 0;
for (TaskChar tCh : tasksSortedByTargetedConstraintRelDegree) {
			this.activationTargetBondsIndex.put(tCh, Integer.valueOf(i++));
}
		// System.err.println("Filthy mess: " + this.activationTargetBondsIndex);
/*
// Start creating the final indexing
this.activityIndexByChainedTargeting = new HashMap<TaskChar, Integer>(tasksSortedByTargetedConstraintRelDegree.size(), (float) 1.0);
// The first task in the list is the one with the highest number of other activities bound by triggered constraints
int j = 0;
TaskChar tCh = null;
while (!tasksSortedByTargetedConstraintRelDegree.isEmpty()) {
tCh = tasksSortedByTargetedConstraintRelDegree.first();
this.activityIndexByChainedTargeting.put(tCh, j++);
// Let us remove the already considered task from the bag
tasksSortedByTargetedConstraintRelDegree.remove(tCh);
// The second task in the ordering should be taken from the set of activities with which the first element relates to
SortedSet<TaskChar> auxRelatedTaskCharsSortedByTriCReDeg = null;
// We order the triggered-constraint-related activities by their triggered-constraint-relationship-degree index
auxRelatedTaskCharsSortedByTriCReDeg = new TreeSet<TaskChar>(tasksComparator);
auxRelatedTaskCharsSortedByTriCReDeg.addAll(this.relatedActivitiesPerActivity.get(tCh));
Iterator<TaskChar> relatedSortedTaskCharsIterator = auxRelatedTaskCharsSortedByTriCReDeg.iterator();
// Now, let us pick the first from this set...
while (relatedSortedTaskCharsIterator.hasNext()) {
tCh = relatedSortedTaskCharsIterator.next();
// If it is not already ranked...
if (!this.activityIndexByChainedTargeting.containsKey(tCh)) {
}
}
// ... add it to the index list ...
this.activityIndexByChainedTargeting.put(tCh, j++);
// ... remove it from the aux bag of task chars
tasksSortedByTargetedConstraintRelDegree.remove(tCh);
// ... and proceed with the depth-first search
auxRelatedTaskCharsSortedByTriCReDeg = new TreeSet<TaskChar>(tasksComparator);
auxRelatedTaskCharsSortedByTriCReDeg.addAll(this.relatedActivitiesPerActivity.get(tCh));
}
*/
}
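	/**
	 * For every activation task, collects the set of tasks it is related to
	 * through the given constraints (existence constraints count the task
	 * itself; for mutual relation constraints the second parameter is treated
	 * as an activation too).
	 */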
private void countRelatedActivitiesPerActivity(Collection<Constraint> constraints) {
Set<TaskChar> relatedActivities = null;
RelationConstraint relaCon = null;
SortedMap<TaskChar, Set<TaskChar>> auxRelatedActivitiesPerActivity = new TreeMap<TaskChar, Set<TaskChar>>();
for (Constraint con : constraints) {
for (TaskChar base : con.getBase().getTaskCharsArray()) {
if (!auxRelatedActivitiesPerActivity.keySet().contains(base)) {
auxRelatedActivitiesPerActivity.put(base, new TreeSet<TaskChar>());
}
relatedActivities = auxRelatedActivitiesPerActivity.get(base);
if (con.getImplied() != null) {
for (TaskChar implied : con.getImplied().getTaskCharsArray()) {
relatedActivities.add(implied);
}
} // in the case of existence constraints, add the base itself!
else {
relatedActivities.add(base);
}
} // in the case of mutual relation constraints, also the second parameter is an activation!
if (con.getFamily() == ConstraintFamily.RELATION) {
relaCon = (RelationConstraint) con;
if (relaCon.getImplicationVerse() == ConstraintImplicationVerse.BOTH) {
for (TaskChar revBase : con.getImplied().getTaskCharsArray()) {
if (!auxRelatedActivitiesPerActivity.keySet().contains(revBase)) {
auxRelatedActivitiesPerActivity.put(revBase, new TreeSet<TaskChar>());
}
relatedActivities = auxRelatedActivitiesPerActivity.get(revBase);
for (TaskChar revImplied : con.getBase().getTaskCharsArray()) {
relatedActivities.add(revImplied);
}
}
}
}
}
		// System.err.println("Filthy mess: " + auxRelatedActivitiesPerActivity);
this.relatedActivitiesPerActivity = auxRelatedActivitiesPerActivity;
}
private Integer computeIndex(Constraint con) {
Integer
index = Integer.MAX_VALUE,
comparison = Integer.MAX_VALUE;
for (TaskChar tCh : con.getBase().getTaskCharsArray()) {
comparison = this.activationTargetBondsIndex.get(tCh);
index = (index < comparison ? index : comparison);
}
		//System.err.println("Filthy mess: for " + con + " the index is " + index);
if (con.getFamily() == ConstraintFamily.RELATION
&& ((RelationConstraint) con).getImplicationVerse() == ConstraintImplicationVerse.BOTH) {
			//System.err.println("Filthy mess: ah, but this one has a both-sided implication verse!");
for (TaskChar tCh : con.getImplied().getTaskCharsArray()) {
comparison = this.activationTargetBondsIndex.get(tCh);
index = (index < comparison ? index : comparison);
}
			//System.err.println("Filthy mess: so for " + con + " the index is now " + index);
}
return index;
}
@Override
public int compare(Constraint o1, Constraint o2) {
int result = this.computeIndex(o1).compareTo(this.computeIndex(o2));
		//System.err.println("Filthy mess: " + o1 + " against " + o2 + " gives " + result);
if (result == 0)
return super.compare(o1, o2);
return result;
}
} | 8,119 | 42.191489 | 137 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModConfidenceBasedComparator.java | package minerful.index.comparator.modular.modules;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
public class ModConfidenceBasedComparator extends ModularConstraintsComparator {
public ModConfidenceBasedComparator() {
super();
}
public ModConfidenceBasedComparator(ModularConstraintsComparator secondLevelComparator) {
super(secondLevelComparator);
}
@Override
public int compare(Constraint o1, Constraint o2) {
Double
confidenceOfO1 = o1.getConfidence(),
confidenceOfO2 = o2.getConfidence();
int result = 0;
result = confidenceOfO1.compareTo(confidenceOfO2);
return (
(result == 0)
? super.compare(o1, o2)
: result * (-1)
);
}
} | 756 | 23.419355 | 90 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModDefaultComparator.java | package minerful.index.comparator.modular.modules;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
public class ModDefaultComparator extends ModularConstraintsComparator {
public ModDefaultComparator() {
super();
}
public ModDefaultComparator(ModularConstraintsComparator secondLevelComparator) {
super(secondLevelComparator);
}
@Override
public int compare(Constraint o1, Constraint o2) {
int result = o1.compareTo(o2);
return (
(result == 0)
? super.compare(o1, o2)
: result
);
}
} | 595 | 21.923077 | 82 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModFamilyBasedComparator.java | package minerful.index.comparator.modular.modules;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintSubFamilyComparator;
import minerful.index.comparator.modular.ModularConstraintsComparator;
public class ModFamilyBasedComparator extends ModularConstraintsComparator {
private ConstraintSubFamilyComparator subFamilyComparator = new ConstraintSubFamilyComparator();
public ModFamilyBasedComparator() {
super();
}
public ModFamilyBasedComparator(ModularConstraintsComparator secondLevelComparator) {
super(secondLevelComparator);
}
@Override
public int compare(Constraint o1, Constraint o2) {
int result = o1.getFamily().compareTo(o2.getFamily());
if (result == 0) {
result = subFamilyComparator.compare(o1.getSubFamily(),o2.getSubFamily());
if (result == 0) {
return super.compare(o1, o2);
}
}
return result;
}
} | 894 | 29.862069 | 97 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModHierarchyBasedComparator.java | package minerful.index.comparator.modular.modules;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
public class ModHierarchyBasedComparator extends ModularConstraintsComparator {
public ModHierarchyBasedComparator() {
super();
}
public ModHierarchyBasedComparator(ModularConstraintsComparator secondLevelComparator) {
super(secondLevelComparator);
}
@Override
public int compare(Constraint o1, Constraint o2) {
// Sort from the most restricting one, to the least restricting one
int result = Integer.valueOf(o1.getHierarchyLevel()).compareTo(Integer.valueOf(o2.getHierarchyLevel())) * -1;
return (
(result == 0)
? super.compare(o1, o2)
: result
);
}
} | 760 | 29.44 | 111 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModInterestFactorBasedComparator.java | package minerful.index.comparator.modular.modules;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
public class ModInterestFactorBasedComparator extends ModularConstraintsComparator {
public ModInterestFactorBasedComparator() {
super();
}
public ModInterestFactorBasedComparator(ModularConstraintsComparator secondLevelComparator) {
super(secondLevelComparator);
}
@Override
public int compare(Constraint o1, Constraint o2) {
Double
interestOfO1 = o1.getInterestFactor(),
interestOfO2 = o2.getInterestFactor();
int result = 0;
result = interestOfO1.compareTo(interestOfO2);
return (
(result == 0)
? super.compare(o1, o2)
: result * (-1)
);
}
} | 766 | 24.566667 | 94 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModRandomComparator.java | package minerful.index.comparator.modular.modules;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
import org.apache.commons.lang3.StringUtils;
public class ModRandomComparator extends ModularConstraintsComparator {
private HashMap<Constraint, Integer> randomIndex;
// private Map<TaskChar, Integer> activityIndexByChainedTargeting;
public ModRandomComparator(Collection<Constraint> constraints) {
super();
this.computeOrderingFunction(constraints);
}
public ModRandomComparator(ModularConstraintsComparator secondLevelComparator, Collection<Constraint> constraints) {
super(secondLevelComparator);
this.computeOrderingFunction(constraints);
}
public void computeOrderingFunction(Collection<Constraint> constraints) {
this.createIndex(constraints);
}
private void createIndex(Collection<Constraint> constraints) {
ArrayList<Constraint> shuffledConstraints = new ArrayList<Constraint>(constraints);
Collections.shuffle(shuffledConstraints);
this.randomIndex = new HashMap<Constraint, Integer>(shuffledConstraints.size(), (float)1.0);
Integer i = 0;
for (Constraint cns : shuffledConstraints) {
this.randomIndex.put(cns, i++);
}
logger.trace("Sorted constraints: " + StringUtils.join(shuffledConstraints, ", "));
}
private Integer computeIndex(Constraint con) {
return randomIndex.get(con);
}
@Override
public int compare(Constraint o1, Constraint o2) {
int result = this.computeIndex(o1).compareTo(this.computeIndex(o2));
//System.err.println("Lurido merdone: merdonazzo: " + o1 + " against " + o2 + " fa " + result);
if (result == 0)
return super.compare(o1, o2);
return result;
}
} | 1,831 | 32.309091 | 117 | java |
Janus | Janus-master/src/minerful/index/comparator/modular/modules/ModSupportBasedComparator.java | package minerful.index.comparator.modular.modules;
import minerful.concept.constraint.Constraint;
import minerful.index.comparator.modular.ModularConstraintsComparator;
public class ModSupportBasedComparator extends ModularConstraintsComparator {
public ModSupportBasedComparator() {
super();
}
public ModSupportBasedComparator(ModularConstraintsComparator secondLevelComparator) {
super(secondLevelComparator);
}
@Override
public int compare(Constraint o1, Constraint o2) {
Double
supportOfO1 = o1.getSupport(),
supportOfO2 = o2.getSupport();
int result = 0;
result = supportOfO1.compareTo(supportOfO2);
return (
(result == 0)
? super.compare(o1, o2)
: result * (-1)
);
}
} | 727 | 23.266667 | 87 | java |
Janus | Janus-master/src/minerful/io/ConstraintsPrinter.java | package minerful.io;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.Collection;
import java.util.Locale;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import minerful.automaton.AutomatonFactory;
import minerful.automaton.SubAutomaton;
import minerful.automaton.concept.weight.WeightedAutomaton;
import minerful.automaton.encdec.AutomatonDotPrinter;
import minerful.automaton.encdec.TsmlEncoder;
import minerful.automaton.encdec.WeightedAutomatonFactory;
import minerful.concept.AbstractTaskClass;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.index.LinearConstraintsIndexFactory;
import minerful.io.encdec.TaskCharEncoderDecoder;
import minerful.io.encdec.csv.CsvEncoder;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.io.encdec.declaremap.DeclareMapReaderWriter;
import minerful.logparser.LogParser;
import dk.brics.automaton.Automaton;
public class ConstraintsPrinter {
private static final String MACHINE_READABLE_RESULTS_SUPPORT_TEXT_SIGNAL = "Measures: ";
public static final String MACHINE_READABLE_RESULTS_LEGEND_TEXT_SIGNAL = "Legend: ";
public static final String MACHINE_READABLE_RESULTS_TEXT_SIGNAL = "Machine-readable results: ";
public static final int SUBAUTOMATA_MAXIMUM_ACTIVITIES_BEFORE_AND_AFTER = // 3;
AutomatonFactory.NO_LIMITS_IN_ACTIONS_FOR_SUBAUTOMATA;
public static final double MINIMUM_THRESHOLD = 0.0;
// private static final int HALF_NUMBER_OF_BARS = 10;
// FIXME Make it user-customisable
private static final boolean PRINT_ONLY_IF_ADDITIONAL_INFO_IS_GIVEN = false;
private ProcessModel processModel;
private Automaton processAutomaton;
private NavigableMap<Constraint, String> additionalCnsIndexedInfo;
public ConstraintsPrinter(ProcessModel processModel) {
this(processModel, null);
}
public ConstraintsPrinter(ProcessModel processModel,
NavigableMap<Constraint, String> additionalCnsIndexedInfo) {
this.processModel = processModel;
this.additionalCnsIndexedInfo = (additionalCnsIndexedInfo == null) ? new TreeMap<Constraint, String>() : additionalCnsIndexedInfo;
}
public String printBag() {
StringBuilder sBld = new StringBuilder();
// The first pass is to understand how to pad the constraints' names
int
maxPadding = computePaddingForConstraintNames();
for (TaskChar key : this.processModel.bag.getTaskChars()) {
sBld.append("\n\t[");
sBld.append(key);
sBld.append("] => {\n"
+ "\t\t");
for (Constraint c : this.processModel.bag.getConstraintsOf(key)) {
if (!c.isMarkedForExclusion()) {
sBld.append(printConstraintsData(c, this.additionalCnsIndexedInfo.get(c), maxPadding)); //, HALF_NUMBER_OF_BARS));
sBld.append("\n\t\t");
}
}
sBld.append("\n\t}\n");
}
return sBld.toString();
}
public String printBagAsMachineReadable() {
return this.printBagAsMachineReadable(true, true, true);
}
public String printBagAsMachineReadable(boolean withNumericalIndex, boolean withTextSignals, boolean withHeaders) {
StringBuilder
sBufLegend = new StringBuilder(),
sBuffIndex = new StringBuilder(),
sBuffValues = new StringBuilder(),
superSbuf = new StringBuilder();
int i = 0;
ConstraintsBag redundaBag = this.processModel.bag.createRedundantCopy(this.processModel.bag.getTaskChars());
for (TaskChar key : redundaBag.getTaskChars()) {
for (Constraint c : redundaBag.getConstraintsOf(key)) {
if (withNumericalIndex) {
sBuffIndex.append(i+1);
sBuffIndex.append(';');
}
sBufLegend.append('\'');
// BUG-FIX: there is no reason why we have to flatten all non-word characters into "_".
// This creates tremendous issues with logs like BPIC 2012, where we have both
// "A_ACCEPTED" and "W_Completeren aanvraag"
sBufLegend.append(c.toString().replace("'", "\\'")); //.replaceAll("\\W", " ").trim().replaceAll(" ", "_"));
sBufLegend.append('\'');
sBufLegend.append(';');
sBuffValues.append(String.format(Locale.ENGLISH, "%.9f", c.getSupport() * 100));
sBuffValues.append(';');
sBufLegend.append(';');
sBuffValues.append(String.format(Locale.ENGLISH, "%.9f", c.getConfidence() * 100));
sBuffValues.append(';');
sBufLegend.append(';');
sBuffValues.append(String.format(Locale.ENGLISH, "%.9f", c.getInterestFactor() * 100));
sBuffValues.append(';');
i++;
}
}
if (withTextSignals) {
superSbuf.append(MACHINE_READABLE_RESULTS_TEXT_SIGNAL);
superSbuf.append("\r\n");
superSbuf.append(MACHINE_READABLE_RESULTS_LEGEND_TEXT_SIGNAL);
}
if (withNumericalIndex) {
superSbuf.append(sBuffIndex.substring(0, sBuffIndex.length() -1));
superSbuf.append("\r\n");
}
if (withHeaders) {
superSbuf.append(sBufLegend.substring(0, sBufLegend.length() -1));
superSbuf.append("\r\n");
if (i > 0)
superSbuf.append("'Support';'Confidence';'InterestF'");
for (int j = 1; j < i; j++) {
superSbuf.append(";'Support';'Confidence';'InterestF'");
}
superSbuf.append("\r\n");
}
if (withTextSignals) {
superSbuf.append(MACHINE_READABLE_RESULTS_SUPPORT_TEXT_SIGNAL);
}
superSbuf.append(sBuffValues.substring(0, sBuffValues.length() -1));
return superSbuf.toString();
}
/**
	 * Prints the constraints in CSV format. Constraints that are marked for exclusion are not included in the print-out.
	 * @return A string containing the list of the process model's constraints in CSV format.
*/
public String printBagCsv() {
return this.printBagCsv(CsvEncoder.PRINT_OUT_ELEMENT.values());
}
/**
	 * Prints the constraints bag in CSV format. The columns appearing in the output can be customised.
	 * @param columns The columns to include in the print-out (internally collected into a sorted set); see the <code>PRINT_OUT_ELEMENT</code> enumeration.
	 * @return A CSV string containing the constraints bag.
*/
public String printBagCsv(CsvEncoder.PRINT_OUT_ELEMENT... columns) {
return new CsvEncoder().printAsCsv(
new TreeSet<CsvEncoder.PRINT_OUT_ELEMENT>(Arrays.asList(columns)),
this.processModel
);
}
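	/*
	 * Usage sketch, assuming a ProcessModel instance named processModel obtained elsewhere
	 * (e.g., via discovery or ProcessModelLoader):
	 *
	 *   ConstraintsPrinter printer = new ConstraintsPrinter(processModel);
	 *   String fullCsv = printer.printBagCsv(); // all columns
	 *   String slimCsv = printer.printBagCsv(   // only the selected columns
	 *       CsvEncoder.PRINT_OUT_ELEMENT.FULL_NAME,
	 *       CsvEncoder.PRINT_OUT_ELEMENT.SUPPORT);
	 */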
private String printConstraintsCollection(Collection<Constraint> constraintsCollection) {
StringBuilder sBld = new StringBuilder();
// The first pass is to understand how to pad the constraints' names
int
maxPadding = computePaddingForConstraintNames(constraintsCollection),
i = 0;
for (Constraint c : constraintsCollection) {
if (!c.isMarkedForExclusion()) {
i++;
sBld.append("\n\t");
sBld.append(printConstraintsData(c, this.additionalCnsIndexedInfo.get(c), maxPadding)); //, HALF_NUMBER_OF_BARS));
}
}
sBld.append("\n\n");
sBld.append("Constraints shown: " + i + "\n");
return sBld.toString();
}
public String printUnfoldedBag() {
return printConstraintsCollection(LinearConstraintsIndexFactory.getAllConstraints(this.processModel.bag));
}
public String printUnfoldedBagOrderedBySupport() {
return printConstraintsCollection(LinearConstraintsIndexFactory.getAllConstraintsSortedBySupport(this.processModel.bag));
}
public String printUnfoldedBagOrderedByInterest() {
return printConstraintsCollection(LinearConstraintsIndexFactory.getAllConstraintsSortedByInterest(this.processModel.bag));
}
public int computePaddingForConstraintNames() {
return computePaddingForConstraintNames(LinearConstraintsIndexFactory.getAllConstraints(this.processModel.bag));
}
public int computePaddingForConstraintNames(Collection<Constraint> constraintsSet) {
int maxPadding = 0,
auxConstraintStringLength = 0;
for (Constraint c : constraintsSet) {
auxConstraintStringLength = c.toString().length();
if (maxPadding < auxConstraintStringLength) {
maxPadding = auxConstraintStringLength;
}
}
// As a rule of thumb...
maxPadding += 3;
return maxPadding;
}
public String printConstraintsData(Constraint constraint, String additionalInfo, int maxPadding) {//, int halfNumberOfBars) {
if (PRINT_ONLY_IF_ADDITIONAL_INFO_IS_GIVEN) {
if (additionalInfo == null || additionalInfo.isEmpty()) {
return "";
}
}
StringBuilder sBld = new StringBuilder();
// int barsCounter = -halfNumberOfBars;
// double relativeSupport = constraint.getRelativeSupport(supportThreshold);
sBld.append(String.format(Locale.ENGLISH, "%7.3f%% ", constraint.getSupport() * 100));
sBld.append(String.format("%-" + maxPadding + "s", constraint.toString()));
// sBld.append(String.format(Locale.ENGLISH, "%8.3f%% ", relativeSupport * 100));
// if (relativeSupport != 0) {
// for (; (barsCounter < relativeSupport * halfNumberOfBars && barsCounter <= 0); barsCounter++) {
// sBld.append(' ');
// }
// for (; (barsCounter >= relativeSupport * halfNumberOfBars && barsCounter <= 0) || (barsCounter < relativeSupport * halfNumberOfBars && barsCounter >= 0); barsCounter++) {
// sBld.append('|');
// }
// }
//
// for (; barsCounter <= halfNumberOfBars; barsCounter++) {
// sBld.append(' ');
// }
sBld.append(String.format(Locale.ENGLISH, " conf.: %7.3f; ", constraint.getConfidence()));
sBld.append(String.format(Locale.ENGLISH, " int'f: %7.3f; ", constraint.getInterestFactor()));
if (constraint.getFitness() != null) {
sBld.append(String.format(Locale.ENGLISH, " fit: %7.3f; ", constraint.getFitness()));
}
if (additionalInfo != null)
sBld.append(additionalInfo);
return sBld.toString();
}
public void saveAsConDecModel(File outFile) throws IOException {
DeclareMapEncoderDecoder deMapEnDec = new DeclareMapEncoderDecoder(processModel);
DeclareMapReaderWriter.marshal(outFile.getCanonicalPath(), deMapEnDec.createDeclareMap());
}
public String printWeightedXmlAutomaton(LogParser logParser, boolean skimIt) throws JAXBException {
if (this.processAutomaton == null)
processAutomaton = this.processModel.buildAutomaton();
WeightedAutomatonFactory wAF = new WeightedAutomatonFactory(TaskCharEncoderDecoder.getTranslationMap(this.processModel.bag));
WeightedAutomaton wAut = wAF.augmentByReplay(processAutomaton, logParser, skimIt);
if (wAut == null)
return null;
JAXBContext jaxbCtx = JAXBContext.newInstance(WeightedAutomaton.class);
Marshaller marsh = jaxbCtx.createMarshaller();
marsh.setProperty("jaxb.formatted.output", true);
StringWriter strixWriter = new StringWriter();
marsh.marshal(wAut, strixWriter);
strixWriter.flush();
StringBuffer strixBuffer = strixWriter.getBuffer();
// OINK
strixBuffer.replace(strixBuffer.indexOf(">", strixBuffer.indexOf("?>") + 3), strixBuffer.indexOf(">", strixBuffer.indexOf("?>") + 3),
" xmlns=\"" + ProcessModel.MINERFUL_XMLNS + "\"");
return strixWriter.toString();
}
public NavigableMap<String, String> printWeightedXmlSubAutomata(LogParser logParser) throws JAXBException {
Collection<SubAutomaton> partialAutomata =
// this.process.buildSubAutomata(ConstraintsPrinter.SUBAUTOMATA_MAXIMUM_ACTIVITIES_BEFORE_AND_AFTER);
this.processModel.buildSubAutomata();
WeightedAutomatonFactory wAF = new WeightedAutomatonFactory(TaskCharEncoderDecoder.getTranslationMap(this.processModel.bag));
NavigableMap<Character, AbstractTaskClass> idsNamesMap = TaskCharEncoderDecoder.getTranslationMap(this.processModel.bag);
NavigableMap<String, String> partialAutomataXmls = new TreeMap<String, String>();
WeightedAutomaton wAut = null;
StringWriter strixWriter = null;
StringBuffer strixBuffer = null;
JAXBContext jaxbCtx = JAXBContext.newInstance(WeightedAutomaton.class);
Marshaller marsh = jaxbCtx.createMarshaller();
marsh.setProperty("jaxb.formatted.output", true);
for (SubAutomaton partialAuto : partialAutomata) {
wAut = wAF.augmentByReplay(partialAuto.automaton, logParser, false, true);
if (wAut != null) {
strixWriter = new StringWriter();
marsh.marshal(wAut, strixWriter);
strixWriter.flush();
strixBuffer = strixWriter.getBuffer();
// OINK
strixBuffer.replace(strixBuffer.indexOf(">", strixBuffer.indexOf("?>") + 3), strixBuffer.indexOf(">", strixBuffer.indexOf("?>") + 3),
" xmlns=\"" + ProcessModel.MINERFUL_XMLNS + "\"");
partialAutomataXmls.put(idsNamesMap.get(partialAuto.basingCharacter).getName(), strixWriter.toString());
}
}
return partialAutomataXmls;
}
public String printDotAutomaton() {
if (this.processAutomaton == null)
processAutomaton = this.processModel.buildAutomaton();
NavigableMap<Character, String> stringMap = new TreeMap<Character, String>();
NavigableMap<Character, AbstractTaskClass> charToClassMap = TaskCharEncoderDecoder.getTranslationMap(this.processModel.bag);
for (Character key : charToClassMap.keySet())
stringMap.put(key, charToClassMap.get(key).getName());
return new AutomatonDotPrinter(stringMap).printDot(processAutomaton);
}
public String printTSMLAutomaton() {
if (this.processAutomaton == null)
processAutomaton = this.processModel.buildAutomaton();
NavigableMap<Character, String> idsNamesMap = new TreeMap<Character, String>();
NavigableMap<Character, AbstractTaskClass> charToClassMap = TaskCharEncoderDecoder.getTranslationMap(this.processModel.bag);
for (Character key : charToClassMap.keySet())
idsNamesMap.put(key, charToClassMap.get(key).getName());
return new TsmlEncoder(idsNamesMap).automatonToTSML(processAutomaton, this.processModel.getName());
}
public NavigableMap<String, String> printDotPartialAutomata() {
NavigableMap<String, String> partialAutomataDots = new TreeMap<String, String>();
Collection<SubAutomaton> partialAutomata =
this.processModel.buildSubAutomata(ConstraintsPrinter.SUBAUTOMATA_MAXIMUM_ACTIVITIES_BEFORE_AND_AFTER);
String dotFormattedAutomaton = null;
NavigableMap<Character, AbstractTaskClass> charToClassMap = TaskCharEncoderDecoder.getTranslationMap(this.processModel.bag);
NavigableMap<Character, String> idsNamesMap = new TreeMap<Character, String>();
for (Character key : charToClassMap.keySet())
idsNamesMap.put(key, charToClassMap.get(key).getName());
AutomatonDotPrinter autoDotPrinter = new AutomatonDotPrinter(idsNamesMap);
for (SubAutomaton partialAutomaton : partialAutomata) {
//dotFormattedAutomaton = partialAutomaton.automaton.toDot();
if (partialAutomaton.automaton.getInitialState().getTransitions().size() > 0) {
dotFormattedAutomaton = autoDotPrinter.printDot(partialAutomaton.automaton, partialAutomaton.basingCharacter);//.replaceIdentifiersWithActivityNamesInDotAutomaton(dotFormattedAutomaton, idsNamesMap, partialAutomaton.basingCharacter);
partialAutomataDots.put(idsNamesMap.get(partialAutomaton.basingCharacter), dotFormattedAutomaton);
}
}
return partialAutomataDots;
}
public ConstraintsBag getBag() {
return this.processModel.bag;
}
} | 15,807 | 41.380697 | 237 | java |
Janus | Janus-master/src/minerful/io/ProcessModelLoader.java | package minerful.io;
import java.io.File;
import minerful.concept.TaskCharArchive;
import org.processmining.plugins.declareminer.visualizing.AssignmentModel;
import minerful.concept.ProcessModel;
import minerful.io.encdec.ProcessModelEncoderDecoder;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.io.params.InputModelParameters;
import minerful.utils.MessagePrinter;
public class ProcessModelLoader {
/**
* Loads a process model from <code>inputFile</code>, either imported from a JSON-format file (see
* {@link ProcessModelEncoderDecoder#readFromJsonFile(File) ProcessModelEncoderDecoder.readFromJsonFile}),
* MINERful XML-format (see
* {@link ProcessModelEncoderDecoder#unmarshalProcessModel(File) ProcessModelEncoderDecoder.unmarshalProcessModel }),
* or Declare Map XML-format (see
* {@link DeclareMapEncoderDecoder#createMinerFulProcessModel() DeclareMapEncoderDecoder.createMinerFulProcessModel}),
* depending on the <code>inputLanguage</code> parameter.
*
* @param inputLanguage The input file formatting language
* @param inputFile The input file
* @return A {@link ProcessModel ProcessModel} instance read from <code>inputFile</code>,
* or <code>null</code> if such file does not exist or is not properly formatted
*/
public ProcessModel loadProcessModel(InputModelParameters.InputEncoding inputLanguage, File inputFile) {
ProcessModel inputProcess = null;
try {
switch (inputLanguage) {
case MINERFUL:
inputProcess = new ProcessModelEncoderDecoder().unmarshalProcessModel(inputFile);
break;
case JSON:
inputProcess = new ProcessModelEncoderDecoder().readFromJsonFile(inputFile);
break;
case DECLARE_MAP:
inputProcess = new DeclareMapEncoderDecoder(inputFile.getAbsolutePath()).createMinerFulProcessModel();
break;
default:
break;
}
} catch (Exception e) {
MessagePrinter.getInstance(this).error("Unreadable process model from file: " + inputFile.getAbsolutePath()
+ ". Check the file path or the specified encoding.", e);
}
return inputProcess;
}
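	/*
	 * Usage sketch, assuming a JSON-encoded model stored at the hypothetical path "model.json":
	 *
	 *   ProcessModel model = new ProcessModelLoader().loadProcessModel(
	 *       InputModelParameters.InputEncoding.JSON, new File("model.json"));
	 *   if (model != null) {
	 *       // the model was read and decoded successfully
	 *   }
	 */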
/**
	 * Loads a process model from a file, guaranteeing that the given encoding-mapping (task alphabet) is respected.
	 *
	 * @param inputLanguage The input file formatting language
	 * @param inputFile The input file
	 * @param alphabet The task-character archive providing the encoding-mapping to respect
	 * @return A {@link ProcessModel ProcessModel} instance read from <code>inputFile</code>, or <code>null</code> if such file does not exist or is not properly formatted
*/
public ProcessModel loadProcessModel(InputModelParameters.InputEncoding inputLanguage, File inputFile, TaskCharArchive alphabet) {
ProcessModel inputProcess = null;
try {
switch (inputLanguage) {
case MINERFUL:
inputProcess = new ProcessModelEncoderDecoder().unmarshalProcessModel(inputFile);
break;
case JSON:
inputProcess = new ProcessModelEncoderDecoder().readFromJsonFile(inputFile, alphabet);
break;
case DECLARE_MAP:
inputProcess = new DeclareMapEncoderDecoder(inputFile.getAbsolutePath()).createMinerFulProcessModel();
break;
default:
break;
}
} catch (Exception e) {
MessagePrinter.getInstance(this).error("Unreadable process model from file: " + inputFile.getAbsolutePath()
+ ". Check the file path or the specified encoding.", e);
}
return inputProcess;
}
public ProcessModel loadProcessModel(AssignmentModel declareMapModel) {
return new DeclareMapEncoderDecoder(declareMapModel).createMinerFulProcessModel();
}
} | 3,292 | 35.588889 | 131 | java |
Janus | Janus-master/src/minerful/io/encdec/DeclareConstraintTransferObject.java | package minerful.io.encdec;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import org.processmining.plugins.declareminer.visualizing.ActivityDefinition;
import org.processmining.plugins.declareminer.visualizing.ConstraintDefinition;
import org.processmining.plugins.declareminer.visualizing.Parameter;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.io.encdec.declaremap.DeclareMapTemplate;
import minerful.io.encdec.declaremap.DeclareMapToMinerFulTemplatesTranslator;
import minerful.io.encdec.pojo.ConstraintPojo;
public class DeclareConstraintTransferObject implements Comparable<DeclareConstraintTransferObject> {
public final DeclareMapTemplate declareMapTemplate;
public final Class<? extends Constraint> minerFulTemplate;
public final List<Set<String>> parameters;
public final Double support;
public final Double confidence;
public final Double interestFactor;
public DeclareConstraintTransferObject(Constraint con) {
this.minerFulTemplate = con.getClass();
this.declareMapTemplate = DeclareMapToMinerFulTemplatesTranslator.translateTemplateName(this.minerFulTemplate);
this.parameters = new ArrayList<Set<String>>();
List<TaskCharSet> params = con.getParameters();
Iterator<TaskCharSet> taskChaIterator = params.iterator();
Set<String> auxParamSet = null;
while (taskChaIterator.hasNext()) {
auxParamSet = new TreeSet<String>();
for (TaskChar tChars : taskChaIterator.next().getTaskCharsArray()) {
auxParamSet.add(tChars.taskClass.getName());
}
this.parameters.add(auxParamSet);
}
this.support = con.getSupport();
this.confidence = con.getConfidence();
this.interestFactor = con.getInterestFactor();
}
public DeclareConstraintTransferObject(ConstraintDefinition declareMapConstraint) {
this.declareMapTemplate = DeclareMapTemplate.fromName(declareMapConstraint.getName());
this.minerFulTemplate = DeclareMapToMinerFulTemplatesTranslator.translateTemplateName(this.declareMapTemplate);
this.parameters = new ArrayList<Set<String>>();
Collection<Parameter> params = declareMapConstraint.getParameters();
Set<String> auxParamSet = null;
for(Parameter p : declareMapConstraint.getParameters()){
auxParamSet = new TreeSet<String>();
for (ActivityDefinition ad : declareMapConstraint.getBranches(p)) {
auxParamSet.add(ad.getName());
}
this.parameters.add(auxParamSet);
}
Matcher
supMatcher = DeclareMapEncoderDecoder.SUPPORT_PATTERN.matcher(declareMapConstraint.getText().trim()),
confiMatcher = DeclareMapEncoderDecoder.CONFIDENCE_PATTERN.matcher(declareMapConstraint.getText().trim()),
inteFaMatcher = DeclareMapEncoderDecoder.INTEREST_FACTOR_PATTERN.matcher(declareMapConstraint.getText().trim());
this.support = (supMatcher.matches() && supMatcher.groupCount() > 0 ? Double.valueOf(supMatcher.group(1)) : Constraint.DEFAULT_SUPPORT);
this.confidence = (confiMatcher.matches() && confiMatcher.groupCount() > 0 ? Double.valueOf(confiMatcher.group(1)) : Constraint.DEFAULT_CONFIDENCE);
this.interestFactor = (inteFaMatcher.matches() && inteFaMatcher.groupCount() > 0 ? Double.valueOf(inteFaMatcher.group(1)): Constraint.DEFAULT_INTEREST_FACTOR);
}
public DeclareConstraintTransferObject(ConstraintPojo pojo) {
/* Search within all possible MINERFul templates */
Class<? extends Constraint> givenMinerFulTemplate = StringToLowerCaseAlphanumToTemplateTranslator.translateTemplateName(pojo.template);
if (givenMinerFulTemplate != null) {
this.minerFulTemplate = givenMinerFulTemplate;
this.declareMapTemplate = DeclareMapToMinerFulTemplatesTranslator.translateTemplateName(this.minerFulTemplate);
} else {
/* Search within all possible DeclareMap templates */
this.declareMapTemplate = DeclareMapTemplate.fromName(pojo.template);
if (this.declareMapTemplate != null) {
this.minerFulTemplate = DeclareMapToMinerFulTemplatesTranslator.translateTemplateName(this.declareMapTemplate);
} else {
throw new IllegalArgumentException("Requested Declare template " + pojo.template + " does not exist.");
}
}
this.parameters = pojo.parameters;
this.support = pojo.support;
this.confidence = pojo.confidence;
this.interestFactor = pojo.interestFactor;
}
public ConstraintPojo toPojo() {
ConstraintPojo pojo = new ConstraintPojo();
pojo.template = MetaConstraintUtils.getTemplateName(this.minerFulTemplate);
pojo.parameters = this.parameters;
pojo.support = this.support;
pojo.confidence = this.confidence;
pojo.interestFactor = this.interestFactor;
return pojo;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("DeclareConstraintTransferObject [declareMapTemplate=");
builder.append(declareMapTemplate);
builder.append(", minerfulTemplate=");
builder.append(minerFulTemplate);
builder.append(", parameters=");
builder.append(parameters);
builder.append(", support=");
builder.append(support);
builder.append(", confidence=");
builder.append(confidence);
builder.append(", interestFactor=");
builder.append(interestFactor);
builder.append("]");
return builder.toString();
}
public Set<String> getAllParamsTasks() {
Set<String> allParamsTasks = new TreeSet<String>();
if (this.parameters != null) {
for (Set<String> paramTasks : this.parameters) {
allParamsTasks.addAll(paramTasks);
}
}
return allParamsTasks;
}
@Override
public int compareTo(DeclareConstraintTransferObject o) {
int result = 0;
result = this.declareMapTemplate.compareTo(o.declareMapTemplate);
if (result == 0) {
result = this.minerFulTemplate.getName().compareTo(o.minerFulTemplate.getName());
if (result == 0) {
/* Compare the parameters' sizes */
for (int i = 0; i < this.parameters.size() && result == 0; i++) {
if (this.parameters.get(i) == null) {
if (o.parameters.get(i) != null) {
return 1;
}
} else {
if (o.parameters.get(i) == null) {
return -1;
}
}
					result = Integer.compare(this.parameters.get(i).size(), o.parameters.get(i).size());
/* Compare the respective parameters' tasks */
if (result == 0) {
Iterator<String>
thisParamsIterator = this.parameters.get(i).iterator(),
oParamsIterator = o.parameters.get(i).iterator();
while (thisParamsIterator.hasNext() && result == 0) {
result = thisParamsIterator.next().compareTo(oParamsIterator.next());
}
}
}
}
}
return result;
}
} | 6,913 | 37.411111 | 161 | java |
Janus | Janus-master/src/minerful/io/encdec/ProcessModelEncoderDecoder.java | package minerful.io.encdec;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.PropertyException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.ValidationEvent;
import javax.xml.bind.ValidationEventHandler;
import minerful.concept.ProcessModel;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.io.encdec.json.JsonPojoEncoderDecoder;
import minerful.io.encdec.pojo.ProcessModelPojo;
import minerful.logparser.LogParser;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
/**
* This class marshals and unmarshals process models to/from XML files.
 * It also reads and saves process models in JSON format from/to JSON-formatted text files and strings.
* @author Claudio Di Ciccio
*
*/
public class ProcessModelEncoderDecoder {
/**
* Changes the identifier of the
* {@link TaskChar TaskChar}
* elements in the given process model according to the encoding of the event log.
* Notice that it does so as a side effect on the original process model passed in input and on the
* {@link TaskChar TaskChar} elements themselves.
* @param processModel A process model
* @param logPar An event log parser
* @return The process model having the {@link TaskChar TaskChar} re-encoded according to the event log identifiers
*/
public ProcessModel reEncodeTaskCharsAccordingToEventLog(ProcessModel processModel, LogParser logPar) {
logPar.getEventEncoderDecoder().mergeWithConstraintsAndUpdateTheirParameters(
processModel.getAllConstraints().toArray(new Constraint[processModel.howManyConstraints()]));
return processModel;
}
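	/*
	 * Usage sketch, assuming a ProcessModel named model and a LogParser named logParser
	 * obtained elsewhere:
	 *
	 *   ProcessModelEncoderDecoder codec = new ProcessModelEncoderDecoder();
	 *   model = codec.reEncodeTaskCharsAccordingToEventLog(model, logParser);
	 *   // the TaskChar identifiers inside model now match the log's encoding (note the side effect)
	 */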
public ProcessModel unmarshalProcessModel(File procSchmInFile) throws JAXBException, PropertyException, FileNotFoundException,
IOException {
String pkgName = ProcessModel.class.getCanonicalName().toString();
pkgName = pkgName.substring(0, pkgName.lastIndexOf('.'));
JAXBContext jaxbCtx = JAXBContext.newInstance(pkgName);
Unmarshaller unmarsh = jaxbCtx.createUnmarshaller();
unmarsh.setEventHandler(
new ValidationEventHandler() {
public boolean handleEvent(ValidationEvent event) {
throw new RuntimeException(event.getMessage(),
event.getLinkedException());
}
});
ProcessModel proMod = (ProcessModel) unmarsh.unmarshal(procSchmInFile);
MetaConstraintUtils.createHierarchicalLinks(proMod.getAllConstraints());
return proMod;
}
public StringBuffer marshalProcessModel(ProcessModel processModel)
throws JAXBException, PropertyException, FileNotFoundException, IOException {
String pkgName = processModel.getClass().getCanonicalName().toString();
pkgName = pkgName.substring(0, pkgName.lastIndexOf('.'));
JAXBContext jaxbCtx = JAXBContext.newInstance(pkgName);
Marshaller marsh = jaxbCtx.createMarshaller();
marsh.setProperty("jaxb.formatted.output", true);
StringWriter strixWriter = new StringWriter();
marsh.marshal(processModel, strixWriter);
strixWriter.flush();
StringBuffer strixBuffer = strixWriter.getBuffer();
return strixBuffer;
}
public void marshalProcessModel(ProcessModel processModel, File procSchmOutFile)
throws JAXBException, PropertyException, FileNotFoundException, IOException {
StringBuffer strixBuffer = this.marshalProcessModel(processModel);
// OINK
// strixBuffer.replace(
// strixBuffer.indexOf(">", strixBuffer.indexOf("?>") + 3),
// strixBuffer.indexOf(">", strixBuffer.indexOf("?>") + 3),
// " xmlns=\"" + ProcessModel.MINERFUL_XMLNS + "\"");
FileWriter strixFileWriter = new FileWriter(procSchmOutFile);
strixFileWriter.write(strixBuffer.toString());
strixFileWriter.flush();
strixFileWriter.close();
}
public ProcessModel readFromJsonFile(File processModelJsonFile) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
JsonPojoEncoderDecoder jsonPojoMgr = new JsonPojoEncoderDecoder();
ProcessModelPojo pojo = jsonPojoMgr.fromJsonToProcessModelPojo(processModelJsonFile);
ProcessModelTransferObject proModTO = new ProcessModelTransferObject(pojo);
TransferObjectToProcessModelTranslator translator = new TransferObjectToProcessModelTranslator();
return translator.createProcessModel(proModTO);
}
/**
	 * Reads a process model from a JSON file, guaranteeing that the given encoding-mapping is respected.
	 *
	 * @param processModelJsonFile The JSON file containing the process model
	 * @param alphabet The task-character archive providing the encoding-mapping to respect
	 * @return The {@link ProcessModel ProcessModel} read from the JSON file
* @throws JsonSyntaxException
* @throws JsonIOException
* @throws FileNotFoundException
*/
public ProcessModel readFromJsonFile(File processModelJsonFile, TaskCharArchive alphabet) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
JsonPojoEncoderDecoder jsonPojoMgr = new JsonPojoEncoderDecoder();
ProcessModelPojo pojo = jsonPojoMgr.fromJsonToProcessModelPojo(processModelJsonFile);
ProcessModelTransferObject proModTO = new ProcessModelTransferObject(pojo);
TransferObjectToProcessModelTranslator translator = new TransferObjectToProcessModelTranslator();
return translator.createProcessModel(proModTO, alphabet);
}
public void writeToJsonFile(ProcessModel processModel, File processModelJsonFile) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
ProcessModelTransferObject proModTO = new ProcessModelTransferObject(processModel);
ProcessModelPojo pojo = proModTO.toPojo();
JsonPojoEncoderDecoder jsonPojoMgr = new JsonPojoEncoderDecoder();
jsonPojoMgr.saveProcessModelPojo(pojo, processModelJsonFile);
return;
}
public ProcessModel readFromJsonString(String processModelJson) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
JsonPojoEncoderDecoder jsonPojoMgr = new JsonPojoEncoderDecoder();
ProcessModelPojo pojo = jsonPojoMgr.fromJsonToProcessModelPojo(processModelJson);
ProcessModelTransferObject proModTO = new ProcessModelTransferObject(pojo);
TransferObjectToProcessModelTranslator translator = new TransferObjectToProcessModelTranslator();
return translator.createProcessModel(proModTO);
}
public String toJsonString(ProcessModel processModel) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
ProcessModelTransferObject proModTO = new ProcessModelTransferObject(processModel);
ProcessModelPojo pojo = proModTO.toPojo();
JsonPojoEncoderDecoder jsonPojoMgr = new JsonPojoEncoderDecoder();
return jsonPojoMgr.fromProcessModelPojoToJson(pojo);
}
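	/*
	 * Round-trip sketch, assuming a ProcessModel instance named processModel and the
	 * hypothetical file path "model.json":
	 *
	 *   ProcessModelEncoderDecoder codec = new ProcessModelEncoderDecoder();
	 *   codec.writeToJsonFile(processModel, new File("model.json"));
	 *   ProcessModel reloaded = codec.readFromJsonFile(new File("model.json"));
	 *   String json = codec.toJsonString(reloaded);
	 */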
} | 6,773 | 43.860927 | 159 | java |
Janus | Janus-master/src/minerful/io/encdec/ProcessModelTransferObject.java | package minerful.io.encdec;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.io.encdec.pojo.ConstraintPojo;
import minerful.io.encdec.pojo.ProcessModelPojo;
public class ProcessModelTransferObject {
public final String name;
public final Set<String> tasks;
public final Set<DeclareConstraintTransferObject> constraints;
public ProcessModelTransferObject(ProcessModelPojo modelPojo) {
this.name = modelPojo.name;
this.constraints = new TreeSet<DeclareConstraintTransferObject>();
this.tasks = new TreeSet<String>();
if (modelPojo.tasks == null || modelPojo.tasks.size() == 0) {
if (modelPojo.constraints.size() > 0) {
for (ConstraintPojo pojo : modelPojo.constraints) {
for (Set<String> paramSet : pojo.parameters) {
for (String param : paramSet) {
this.tasks.add(param);
}
}
}
}
} else {
this.tasks.addAll(modelPojo.tasks);
}
if (modelPojo.constraints != null) {
for (ConstraintPojo conPojo : modelPojo.constraints) {
this.constraints.add(new DeclareConstraintTransferObject(conPojo));
}
}
}
public ProcessModelTransferObject(ProcessModel proMod) {
this.name = proMod.getName();
this.tasks = new HashSet<String>(proMod.howManyTasks(), (float)1.0);
for (TaskChar taskChar: proMod.getTasks()) {
this.tasks.add(taskChar.getName());
}
this.constraints = new HashSet<DeclareConstraintTransferObject>(proMod.howManyUnmarkedConstraints(), (float)1.0);
for (Constraint con: proMod.getAllUnmarkedConstraints()) {
this.constraints.add(new DeclareConstraintTransferObject(con));
}
}
public ProcessModelPojo toPojo() {
ProcessModelPojo pojo = new ProcessModelPojo();
pojo.name = this.name;
pojo.tasks = this.tasks;
pojo.constraints = new HashSet<ConstraintPojo>(this.constraints.size(), (float)1.0);
for (DeclareConstraintTransferObject conTO : this.constraints) {
pojo.constraints.add(conTO.toPojo());
}
return pojo;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("ProcessModelTransferObject [name=");
builder.append(name);
builder.append(", tasks=");
builder.append(tasks);
builder.append(", constraints=");
builder.append(constraints);
builder.append("]");
return builder.toString();
}
} | 2,449 | 29.246914 | 115 | java |
Janus | Janus-master/src/minerful/io/encdec/StringToLowerCaseAlphanumToTemplateTranslator.java | package minerful.io.encdec;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
public class StringToLowerCaseAlphanumToTemplateTranslator {
public static Map<String, Class<? extends Constraint>> LOWERCASE_ALPHANUM_CONSTRAINT_TEMPLATE_NAMES =
new HashMap<String, Class<? extends Constraint>>(MetaConstraintUtils.ALL_CONSTRAINT_TEMPLATE_NAMES_MAP.keySet().size(), (float)1.0);
public static Class<? extends Constraint> translateTemplateName(String templateName) {
populateTemplateNamesMapIfEmpty();
templateName = makeLowercaseOnlyAlphanum(templateName);
return LOWERCASE_ALPHANUM_CONSTRAINT_TEMPLATE_NAMES.get(templateName);
}
public static boolean containsTemplateName(String templateName) {
populateTemplateNamesMapIfEmpty();
templateName = makeLowercaseOnlyAlphanum(templateName);
return LOWERCASE_ALPHANUM_CONSTRAINT_TEMPLATE_NAMES.containsKey(templateName);
}
public static String makeLowercaseOnlyAlphanum(String templateName) {
return templateName.replaceAll("[^\\p{Alpha}\\p{Digit}]", "").toLowerCase();
}
private static void populateTemplateNamesMapIfEmpty() {
if (LOWERCASE_ALPHANUM_CONSTRAINT_TEMPLATE_NAMES.isEmpty()) {
for (Entry<String, Class<? extends Constraint>> entry : MetaConstraintUtils.ALL_CONSTRAINT_TEMPLATE_NAMES_MAP.entrySet()) {
LOWERCASE_ALPHANUM_CONSTRAINT_TEMPLATE_NAMES.put(makeLowercaseOnlyAlphanum(entry.getKey()), entry.getValue());
}
}
}
}
| 1,550 | 39.815789 | 135 | java |
Janus | Janus-master/src/minerful/io/encdec/TaskCharEncoderDecoder.java | package minerful.io.encdec;
import java.util.*;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.logparser.CharTaskClass;
import minerful.logparser.StringTaskClass;
import org.apache.log4j.Logger;
/*
0030-0039 [numbers] 9
0041-005A [u.c. basic latin] + 25 = 34
0061-007A [l.c. basic latin] + 25 = 59
00C0-00FF [latin supplement] + 63 = 122
0100-017F [latin extended-a] +127 = 249
1E00-1EFF [latin extended-b] +255 = 504
2C60-2C7F [latin-extended-c] + 31 = 535
A720-A78E [latin-extended-d-1] +110 = 645
A7A0-A7AA [latin-extended-d-2] + 10 = 655
A7F8-A7FF [latin-extended-d-3] + 7 = 662
061E-064A [arabic-1] + 44 = 706
0660-066F [arabic-2] + 16 = 722
0671-06D5 [arabic-3] +100 = 822
3041-3096 [hiragana] + 85 = 907
30A0-30FF [katakana] +100 =1007
16F00-16F44 [miao.. miao??] + 68 =1075
*/
public class TaskCharEncoderDecoder {
public static final char CONTEMPORANEITY_CHARACTER_DELIMITER = '|';
public static final int UNICODE_NUMBERS_LOWER_BOUND = 0x0030;
public static final int UNICODE_NUMBERS_UPPER_BOUND = 0x0039;
public static final int UNICODE_BASIC_LATIN_UC_LOWER_BOUND = 0x0041;
public static final int UNICODE_BASIC_LATIN_UC_UPPER_BOUND = 0x005A;
public static final int UNICODE_BASIC_LATIN_LC_LOWER_BOUND = 0x0061;
public static final int UNICODE_BASIC_LATIN_LC_UPPER_BOUND = 0x007A;
public static final int UNICODE_LATIN_SUPPLEMENT_LOWER_BOUND = 0x00C0;
public static final int UNICODE_LATIN_SUPPLEMENT_UPPER_BOUND = 0x00FF;
public static final int UNICODE_EXT_LATIN_A_LOWER_BOUND = 0x0100;
public static final int UNICODE_EXT_LATIN_A_UPPER_BOUND = 0x017F;
public static final int UNICODE_EXT_LATIN_B_LOWER_BOUND = 0x1E00;
public static final int UNICODE_EXT_LATIN_B_UPPER_BOUND = 0x1EFF;
public static final int UNICODE_EXT_LATIN_C_LOWER_BOUND = 0x2C60;
public static final int UNICODE_EXT_LATIN_C_UPPER_BOUND = 0x2C7F;
public static final int UNICODE_EXT_LATIN_D_1_LOWER_BOUND = 0xA720;
public static final int UNICODE_EXT_LATIN_D_1_UPPER_BOUND = 0xA78E;
public static final int UNICODE_EXT_LATIN_D_2_LOWER_BOUND = 0xA7A0;
public static final int UNICODE_EXT_LATIN_D_2_UPPER_BOUND = 0xA7AA;
public static final int UNICODE_EXT_LATIN_D_3_LOWER_BOUND = 0xA7F8;
public static final int UNICODE_EXT_LATIN_D_3_UPPER_BOUND = 0xA7FF;
public static final int UNICODE_ARABIC_1_LOWER_BOUND = 0x061E;
public static final int UNICODE_ARABIC_1_UPPER_BOUND = 0x064A;
public static final int UNICODE_ARABIC_2_LOWER_BOUND = 0x0660;
public static final int UNICODE_ARABIC_2_UPPER_BOUND = 0x066F;
public static final int UNICODE_ARABIC_3_LOWER_BOUND = 0x0671;
public static final int UNICODE_ARABIC_3_UPPER_BOUND = 0x06D5;
public static final int UNICODE_ARABIC_4_LOWER_BOUND = 0x0660;
public static final int UNICODE_ARABIC_4_UPPER_BOUND = 0x066F;
public static final int UNICODE_HIRAGANA_LOWER_BOUND = 0x3041;
public static final int UNICODE_HIRAGANA_UPPER_BOUND = 0x3096;
public static final int UNICODE_KATAKANA_LOWER_BOUND = 0x30A0;
public static final int UNICODE_KATAKANA_UPPER_BOUND = 0x30FF;
public static final int UNICODE_MIAO_LOWER_BOUND = 0x16F00;
public static final int UNICODE_MIAO_UPPER_BOUND = 0x16F44;
public static final int[] LOWER_BOUNDS = {
UNICODE_BASIC_LATIN_UC_LOWER_BOUND,
UNICODE_BASIC_LATIN_LC_LOWER_BOUND,
UNICODE_LATIN_SUPPLEMENT_LOWER_BOUND,
UNICODE_EXT_LATIN_A_LOWER_BOUND,
UNICODE_EXT_LATIN_B_LOWER_BOUND,
UNICODE_EXT_LATIN_C_LOWER_BOUND,
UNICODE_EXT_LATIN_D_1_LOWER_BOUND,
UNICODE_EXT_LATIN_D_2_LOWER_BOUND,
UNICODE_EXT_LATIN_D_3_LOWER_BOUND,
UNICODE_ARABIC_1_LOWER_BOUND,
UNICODE_ARABIC_2_LOWER_BOUND,
UNICODE_ARABIC_3_LOWER_BOUND,
UNICODE_ARABIC_4_LOWER_BOUND,
UNICODE_HIRAGANA_LOWER_BOUND,
UNICODE_KATAKANA_LOWER_BOUND,
UNICODE_NUMBERS_LOWER_BOUND,
// UNICODE_MIAO_LOWER_BOUND,
};
public static final int[] UPPER_BOUNDS = {
UNICODE_BASIC_LATIN_UC_UPPER_BOUND,
UNICODE_BASIC_LATIN_LC_UPPER_BOUND,
UNICODE_LATIN_SUPPLEMENT_UPPER_BOUND,
UNICODE_EXT_LATIN_A_UPPER_BOUND,
UNICODE_EXT_LATIN_B_UPPER_BOUND,
UNICODE_EXT_LATIN_C_UPPER_BOUND,
UNICODE_EXT_LATIN_D_1_UPPER_BOUND,
UNICODE_EXT_LATIN_D_2_UPPER_BOUND,
UNICODE_EXT_LATIN_D_3_UPPER_BOUND,
UNICODE_ARABIC_1_UPPER_BOUND,
UNICODE_ARABIC_2_UPPER_BOUND,
UNICODE_ARABIC_3_UPPER_BOUND,
UNICODE_ARABIC_4_UPPER_BOUND,
UNICODE_HIRAGANA_UPPER_BOUND,
UNICODE_KATAKANA_UPPER_BOUND,
UNICODE_NUMBERS_UPPER_BOUND,
// UNICODE_MIAO_UPPER_BOUND,
};
public static final int encodableTasksNumber() {
int total = 0;
for (int i = 0; i < UPPER_BOUNDS.length && i < LOWER_BOUNDS.length; i++) {
total += UPPER_BOUNDS[i] - LOWER_BOUNDS[i];
}
return total;
}
public static final AbstractTaskClass[] TEST_TASK_CLASSES = {new StringTaskClass("deliverable"), new StringTaskClass("package"), new StringTaskClass("wp"),
new StringTaskClass("meeting"), new StringTaskClass("deadline"), new StringTaskClass("task force"), new StringTaskClass("submission"), new StringTaskClass("report"),
new StringTaskClass("demo"), new StringTaskClass("contribution"), new StringTaskClass("project"), new StringTaskClass("timeline"), new StringTaskClass("presentation"),
new StringTaskClass("agenda"), new StringTaskClass("timetable"), new StringTaskClass("slide"), new StringTaskClass("integration"), new StringTaskClass("iteration"),
new StringTaskClass("release"), new StringTaskClass("requirement"), new StringTaskClass("review"), new StringTaskClass("reviewer"), new StringTaskClass("agreement"),
new StringTaskClass("interaction"), new StringTaskClass("logistics"), new StringTaskClass("payment"), new StringTaskClass("paper"), new StringTaskClass("video"),
new StringTaskClass("commitment"), new StringTaskClass("draft"), new StringTaskClass("call"), new StringTaskClass("publication"), new StringTaskClass("proposal"),
new StringTaskClass("document"), new StringTaskClass("invitation"), new StringTaskClass("update"), new StringTaskClass("status"), new StringTaskClass("cost"), new StringTaskClass("step"),
new StringTaskClass("version"), new StringTaskClass("frame"), new StringTaskClass("introduction"), new StringTaskClass("finance"), new StringTaskClass("management"),
new StringTaskClass("form"), new StringTaskClass("comment"), new StringTaskClass("strategy"), new StringTaskClass("final"), new StringTaskClass("periodic"), new StringTaskClass("dow"), new StringTaskClass("note"),
new StringTaskClass("objective"),
new StringTaskClass("change"),
new StringTaskClass("showcase"),
new StringTaskClass("issue"),
new StringTaskClass("activity"),
// here I start inventing!
new StringTaskClass("invention"), new StringTaskClass("innovation"), new StringTaskClass("html"), new StringTaskClass("css"), new StringTaskClass("xhtml"), new StringTaskClass("jquery"), new StringTaskClass("php"),
new StringTaskClass("java"), new StringTaskClass("c++"), new StringTaskClass("python"), new StringTaskClass("div"), new StringTaskClass("love"), new StringTaskClass("merry"), new StringTaskClass("christmas"),
new StringTaskClass("brandybuck"), new StringTaskClass("frodo"), new StringTaskClass("rings"), new StringTaskClass("sonic"), new StringTaskClass("eggman"), new StringTaskClass("kukukukchu"),
new StringTaskClass("failure"), new StringTaskClass("cover"), new StringTaskClass("rehearsal"), new StringTaskClass("circle"), new StringTaskClass("artist"), new StringTaskClass("wallet"),
new StringTaskClass("steal"), new StringTaskClass("driving"), new StringTaskClass("license"), new StringTaskClass("avis"), new StringTaskClass("trouble"), new StringTaskClass("avignon"),
new StringTaskClass("birthday"), new StringTaskClass("princess"), new StringTaskClass("castle")};
public static final Character WILDCARD_CHAR = '_';
public static final String WILDCARD_STRING = "*";
private TreeMap<AbstractTaskClass, Character> tasksDictionary;
private TreeMap<Character, AbstractTaskClass> inverseTasksDictionary;
private int charCursor, tasksCursor, boundCursor;
private static Logger logger;
public TaskCharEncoderDecoder() {
this.charCursor = 0;
this.tasksCursor = 0;
this.boundCursor = 0;
this.tasksDictionary = new TreeMap<AbstractTaskClass, Character>();
this.inverseTasksDictionary = new TreeMap<Character, AbstractTaskClass>();
if (logger == null) {
logger = Logger.getLogger(this.getClass().getCanonicalName());
}
}
public Map<Character, AbstractTaskClass> getTranslationMap() {
return new HashMap<Character, AbstractTaskClass>(this.inverseTasksDictionary);
}
/**
* Returns a string representation of the decoding map, from single-character identifier to task.
*
* @return A string representation of the decoding map.
*/
public String printDecodingMap() {
StringBuilder sBuil = new StringBuilder();
sBuil.append("Deconding map. Read:\n"
+ " <key> => <value>\n");
for (Character key : this.inverseTasksDictionary.keySet()) {
sBuil.append(" ");
sBuil.append(key);
sBuil.append(" => ");
sBuil.append(this.inverseTasksDictionary.get(key));
sBuil.append("\n");
}
return sBuil.toString();
}
public static final Map<Character, AbstractTaskClass> getTranslationMap(TaskChar... tasks) {
HashMap<Character, AbstractTaskClass> transMap = new HashMap<Character, AbstractTaskClass>(tasks.length, (float) 1.0);
for (TaskChar task : tasks) {
transMap.put(task.identifier, task.taskClass);
}
return transMap;
}
public static final Map<Character, AbstractTaskClass> getTranslationMap(Set<TaskChar> tasks) {
HashMap<Character, AbstractTaskClass> transMap = new HashMap<Character, AbstractTaskClass>(tasks.size(), (float) 1.0);
for (TaskChar task : tasks) {
transMap.put(task.identifier, task.taskClass);
}
return transMap;
}
public Map<AbstractTaskClass, Character> getInverseTranslationMap() {
return new HashMap<AbstractTaskClass, Character>(this.tasksDictionary);
}
public static NavigableMap<Character, AbstractTaskClass> getTranslationMap(ConstraintsBag bag) {
NavigableMap<Character, AbstractTaskClass> transMap = new TreeMap<Character, AbstractTaskClass>();
for (TaskChar tChr : bag.getTaskChars()) {
transMap.put(tChr.identifier, tChr.taskClass);
}
return transMap;
}
@Deprecated
public static Character[] faultyEncode(String[] tasks) {
Character[] encodedTasks = new Character[tasks.length];
int i = 0;
for (String task : tasks) {
encodedTasks[i++] = task.charAt(0);
}
return encodedTasks;
}
/**
* Records the encoding of the passed task chars.
*
	 * @param taskChars The task-character pairs whose task classes are to be encoded
	 * @return The characters assigned as identifiers to the encoded task classes
*/
public Character[] encode(Collection<TaskChar> taskChars) {
AbstractTaskClass[] taskClasses = new AbstractTaskClass[taskChars.size()];
int i = 0;
for (TaskChar tCh : taskChars) {
taskClasses[i++] = tCh.taskClass;
}
return encode(taskClasses);
}
public Character[] encode(AbstractTaskClass... taskClasses) {
Character[] encodedTasks = new Character[0];
Class<? extends AbstractTaskClass> taskClassType = null;
for (AbstractTaskClass tkC : taskClasses) {
if (taskClassType == null)
taskClassType = tkC.getClass();
else if (!taskClassType.equals(tkC.getClass())) {
throw new IllegalArgumentException("All tasks must be classified by the same criterion");
}
}
this.tasksCursor = 0; // @alessio reset task cursor
if (taskClassType.equals(CharTaskClass.class)) {
CharTaskClass chTkClass = null;
// Encoding is not really needed
while (tasksCursor < taskClasses.length) {
chTkClass = ((CharTaskClass) taskClasses[tasksCursor]);
tasksDictionary.put(taskClasses[tasksCursor], chTkClass.charClass);
inverseTasksDictionary.put(chTkClass.charClass, taskClasses[tasksCursor]);
tasksCursor++;
}
} else {
while (tasksCursor < taskClasses.length && boundCursor < LOWER_BOUNDS.length
&& boundCursor < UPPER_BOUNDS.length) {
charCursor = Math.max(charCursor, LOWER_BOUNDS[boundCursor]);
for (; tasksCursor < taskClasses.length
&& charCursor < UPPER_BOUNDS[boundCursor]; tasksCursor++) {
if (tasksDictionary.containsKey(taskClasses[tasksCursor])) continue; // @Alessio in this way we can keep an already initialized encoding
tasksDictionary.put(taskClasses[tasksCursor],
Character.valueOf((char) charCursor));
inverseTasksDictionary.put(
Character.valueOf((char) charCursor),
taskClasses[tasksCursor]);
charCursor++;
}
if (tasksCursor < taskClasses.length) {
boundCursor++;
}
}
if (tasksCursor < taskClasses.length)
throw new UnsupportedOperationException("The method was not able"
+ " to encode the whole collection of tasks");
}
return inverseTasksDictionary.keySet().toArray(encodedTasks);
}
public Character encode(AbstractTaskClass taskClass) {
if (taskClass == null) {
logger.error("A task is identified by a NULL value: skipping this task");
return null;
} else if (taskClass.toString().length() == 0) {
logger.warn("A task is identified by an empty string");
}
// If the tasks dictionary already contains this task, skip this!
if (!this.tasksDictionary.containsKey(taskClass)) {
// If the bound was not reached for the current translation group,
// skip this!
if (charCursor >= UPPER_BOUNDS[boundCursor]) {
// If we have no more translation groups left, we're in a
// trouble!
if (boundCursor < LOWER_BOUNDS.length - 1
&& boundCursor < UPPER_BOUNDS.length - 1) {
boundCursor++;
charCursor = LOWER_BOUNDS[boundCursor];
} else {
throw new UnsupportedOperationException(
"The method was not able " +
"to encode the whole collection of tasks. " +
"Currently, only " +
encodableTasksNumber() +
" can be encoded.");
}
} else if (charCursor == 0) {
charCursor = LOWER_BOUNDS[boundCursor];
}
tasksDictionary.put(taskClass, Character.valueOf((char) charCursor));
inverseTasksDictionary.put(Character.valueOf((char) charCursor),
taskClass);
charCursor++;
}
return this.tasksDictionary.get(taskClass);
}
/**
	 * Encodes a list of lists of task classes into an array of strings, where each character encodes a single event class.
	 *
	 * @param tasksTraces An event log
	 * @return An array of strings (one per trace in the log)
*/
public String[] encode(List<List<AbstractTaskClass>> tasksTraces) {
String[] stringsTracesArray = new String[0];
List<String> stringTraces = new ArrayList<String>(tasksTraces.size());
StringBuilder striTraBuilder = new StringBuilder();
Character c = null;
for (List<AbstractTaskClass> tasksTrace : tasksTraces) {
striTraBuilder.delete(0, striTraBuilder.length());
for (AbstractTaskClass task : tasksTrace) {
c = this.encode(task);
striTraBuilder.append(c);
}
stringTraces.add(striTraBuilder.toString());
}
stringsTracesArray = stringTraces.toArray(stringsTracesArray);
return stringsTracesArray;
}
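	/*
	 * Encoding sketch: each distinct task class is assigned a one-character identifier,
	 * so every trace becomes a string. Assuming the hypothetical two-trace log below
	 * (identifiers are assigned in encounter order, starting from the first Unicode range):
	 *
	 *   TaskCharEncoderDecoder codec = new TaskCharEncoderDecoder();
	 *   List<List<AbstractTaskClass>> log = Arrays.asList(
	 *       Arrays.asList((AbstractTaskClass) new StringTaskClass("a"), new StringTaskClass("b")),
	 *       Arrays.asList((AbstractTaskClass) new StringTaskClass("b"), new StringTaskClass("a")));
	 *   String[] encoded = codec.encode(log); // e.g., {"AB", "BA"}
	 */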
public AbstractTaskClass decode(Character encodedTask) {
return this.inverseTasksDictionary.get(encodedTask);
}
public AbstractTaskClass[] decode(Character[] charArray) {
AbstractTaskClass[] taskClassesArray = new AbstractTaskClass[charArray.length];
for (int i = 0; i < charArray.length; i++) {
taskClassesArray[i] = this.decode(charArray[i]);
}
return taskClassesArray;
}
public AbstractTaskClass[] decode(String charString) {
Character[] charArray = new Character[charString.length()];
int i = 0;
for (char character : charString.toCharArray()) {
charArray[i++] = character;
}
return this.decode(charArray);
}
public static char encodedCharFromString(String encodedCharString) {
return
encodedCharString.startsWith("\\u")
? (char) (Integer.parseInt(encodedCharString.substring(2), 16))
: encodedCharString.charAt(0);
}
public Set<AbstractTaskClass> getTaskClasses() {
return this.tasksDictionary.keySet();
}
@Override
public String toString() {
StringBuffer sBuf = new StringBuffer();
for (AbstractTaskClass taskClass : tasksDictionary.keySet()) {
sBuf.append(tasksDictionary.get(taskClass));
sBuf.append(" <= ");
sBuf.append(taskClass);
sBuf.append(" (");
sBuf.append(taskClass);
sBuf.append(")\n");
}
return sBuf.toString();
}
public Character[] encodedTasks() {
return this.inverseTasksDictionary.keySet().toArray(new Character[0]);
}
	public String[] decodedTasks() {
		// Collect task names explicitly: toArray(new String[0]) on a Set<AbstractTaskClass> would throw an ArrayStoreException
		String[] decodedTasks = new String[this.tasksDictionary.size()];
		int i = 0;
		for (AbstractTaskClass taskClass : this.tasksDictionary.keySet())
			decodedTasks[i++] = taskClass.getName();
		return decodedTasks;
	}
public static void main(String[] args) {
TaskCharEncoderDecoder taChEnDe = new TaskCharEncoderDecoder();
taChEnDe.encode(TEST_TASK_CLASSES);
logger.debug(taChEnDe);
}
public static String replaceNonWordCharacters(String originalString) {
return originalString.replaceAll("\\W", "_");
}
public Collection<AbstractTaskClass> excludeThese(Collection<String> activitiesToExcludeFromResult) {
AbstractTaskClass excludedTask = null;
Collection<AbstractTaskClass> excludedTasks = new ArrayList<AbstractTaskClass>(activitiesToExcludeFromResult.size());
if (activitiesToExcludeFromResult != null) {
for (String activityToExclude : activitiesToExcludeFromResult) {
excludedTask = this.removeFromTranslationMap(activityToExclude);
if (excludedTask != null) {
excludedTasks.add(excludedTask);
} else {
logger.warn("A non-existing activity was requested to be removed from the alphabet: " + excludedTask);
}
}
}
return excludedTasks;
}
private AbstractTaskClass removeFromTranslationMap(String activityToExclude) {
Character charToRemove = null;
for (AbstractTaskClass key : this.tasksDictionary.keySet()) {
if (key.toString().equals(activityToExclude)) {
charToRemove = tasksDictionary.remove(key);
inverseTasksDictionary.remove(charToRemove);
return key;
}
}
return null;
}
/**
* Includes the tasks from the constraints in the managed set.
* As a side effect, it replaces the existing index characters of the constraints' parameters with new ones.
*
* @param constraints Constraints from which TaskChars are extracted
*/
public void mergeWithConstraintsAndUpdateTheirParameters(Constraint... constraints) {
char charId = Character.END_PUNCTUATION;
for (Constraint con : constraints) {
for (TaskCharSet taChSet : con.getParameters()) {
for (TaskChar taChar : taChSet.getTaskCharsArray()) {
charId = this.encode(taChar.taskClass);
taChar.identifier = charId;
}
taChSet.refreshListOfIdentifiers();
}
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TaskCharEncoderDecoder that = (TaskCharEncoderDecoder) o;
return charCursor == that.charCursor && tasksDictionary.equals(that.tasksDictionary) && inverseTasksDictionary.equals(that.inverseTasksDictionary);
}
@Override
public int hashCode() {
return Objects.hash(tasksDictionary, inverseTasksDictionary, charCursor);
}
} | 22,096 | 44.095918 | 226 | java |
Janus | Janus-master/src/minerful/io/encdec/TransferObjectToConstraintTranslator.java | package minerful.io.encdec;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSetFactory;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
public class TransferObjectToConstraintTranslator {
private TaskCharSetFactory taskCharSetFactory;
public TransferObjectToConstraintTranslator(TaskCharArchive taskCharArchive) {
this.taskCharSetFactory = new TaskCharSetFactory(taskCharArchive);
}
public Constraint createConstraint(DeclareConstraintTransferObject conTO) {
if (conTO.minerFulTemplate != null) {
Constraint minerFulConstraint =
MetaConstraintUtils.makeConstraint(
conTO.minerFulTemplate,
this.taskCharSetFactory.createSetsFromTaskStringsCollection(
conTO.parameters
)
);
if (conTO.support != null) {
minerFulConstraint.setSupport(conTO.support);
}
if (conTO.confidence != null) {
minerFulConstraint.setConfidence(conTO.confidence);
}
if (conTO.interestFactor != null) {
minerFulConstraint.setInterestFactor(conTO.interestFactor);
}
return minerFulConstraint;
}
return null;
}
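/*
 * Usage sketch (illustrative; `taskCharArchive` and `conTO` are assumed to be
 * available, e.g., obtained from a parsed model file):
 *
 *   TransferObjectToConstraintTranslator translator =
 *       new TransferObjectToConstraintTranslator(taskCharArchive);
 *   Constraint constraint = translator.createConstraint(conTO);
 *   // createConstraint(...) returns null when the transfer object carries no
 *   // MINERful template, so callers should check for null
 */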
} | 1,142 | 29.891892 | 79 | java |
Janus | Janus-master/src/minerful/io/encdec/TransferObjectToProcessModelTranslator.java | package minerful.io.encdec;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharFactory;
import minerful.concept.constraint.ConstraintsBag;
public class TransferObjectToProcessModelTranslator {
public TransferObjectToProcessModelTranslator() {}
/**
* Creates a process model from its transfer-object representation (e.g., as read from a JSON file),
* guaranteeing that the given encoding-mapping is respected.
* @param proModTO the transfer object describing the process model
* @param alphabet the task archive providing the encoding-mapping to preserve
* @return the resulting process model
*/
public ProcessModel createProcessModel(ProcessModelTransferObject proModTO, TaskCharArchive alphabet) {
TaskCharEncoderDecoder alphabetEncoder= new TaskCharEncoderDecoder();
alphabetEncoder.encode(alphabet.getTaskChars());
/* Create/update the TaskCharArchive */
TaskCharFactory taskCharFactory = new TaskCharFactory(alphabetEncoder);
Set<TaskChar> taskChars = new TreeSet<TaskChar>();
for (String taskName : proModTO.tasks) {
taskChars.add(taskCharFactory.makeTaskChar(taskName));
}
TaskCharArchive taskCharArchive = new TaskCharArchive(taskChars);
/* Create the constraints translator */
TransferObjectToConstraintTranslator conTranslator = new TransferObjectToConstraintTranslator(taskCharArchive);
ConstraintsBag bag = new ConstraintsBag(taskChars);
for(DeclareConstraintTransferObject conTO : proModTO.constraints) {
bag.add(conTranslator.createConstraint(conTO));
}
return new ProcessModel(taskCharArchive, bag, proModTO.name);
}
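/*
 * Usage sketch (illustrative; `proModTO` and `existingArchive` are assumed to
 * be available, e.g., the former deserialised from a JSON model file):
 *
 *   TransferObjectToProcessModelTranslator translator =
 *       new TransferObjectToProcessModelTranslator();
 *   ProcessModel model = translator.createProcessModel(proModTO, existingArchive);
 */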
public ProcessModel createProcessModel(ProcessModelTransferObject proModTO) {
/* Create/update the TaskCharArchive */
TaskCharFactory taskCharFactory = new TaskCharFactory();
Set<TaskChar> taskChars = new TreeSet<TaskChar>();
for (String taskName : proModTO.tasks) {
taskChars.add(taskCharFactory.makeTaskChar(taskName));
}
TaskCharArchive taskCharArchive = new TaskCharArchive(taskChars);
/* Create the constraints translator */
TransferObjectToConstraintTranslator conTranslator = new TransferObjectToConstraintTranslator(taskCharArchive);
ConstraintsBag bag = new ConstraintsBag(taskChars);
for(DeclareConstraintTransferObject conTO : proModTO.constraints) {
bag.add(conTranslator.createConstraint(conTO));
}
return new ProcessModel(taskCharArchive, bag, proModTO.name);
}
} | 2,366 | 37.177419 | 113 | java |
Janus | Janus-master/src/minerful/io/encdec/csv/CsvEncoder.java | package minerful.io.encdec.csv;
import java.util.Collection;
import java.util.Locale;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
public class CsvEncoder {
public enum PRINT_OUT_ELEMENT implements Comparable<PRINT_OUT_ELEMENT> {
FULL_NAME("Constraint"),
TEMPLATE_NAME("Template"),
ACTIVATION("Activation"),
TARGET("Target"),
SUPPORT("Support"),
CONFIDENCE_LEVEL("Confidence level"),
INTEREST_FACTOR("Interest factor");
private final String label;
private PRINT_OUT_ELEMENT(String label) {
this.label = label;
}
public String toString() {
return this.label;
}
};
/**
* Prints the CSV format of the constraints bag. The columns appearing in the file can be customised.
* @param columns The columns to include, in the desired order. See the <code>PRINT_OUT_ELEMENT</code> enumeration.
* @param proMod A declarative process model.
* @return A CSV string containing the constraints bag.
*/
public String printAsCsv(Collection<PRINT_OUT_ELEMENT> columns, ProcessModel proMod) {
StringBuilder
superSbuf = new StringBuilder(),
sottoSbuf = new StringBuilder();
for (PRINT_OUT_ELEMENT col : columns) {
if (columns.contains(col)) {
sottoSbuf.append("';'");
sottoSbuf.append(col.toString());
}
}
superSbuf.append(sottoSbuf.substring(2)+"'");
superSbuf.append("\n");
sottoSbuf = new StringBuilder();
for (TaskChar key : proMod.bag.getTaskChars()) {
for (Constraint c : proMod.bag.getConstraintsOf(key)) {
for (PRINT_OUT_ELEMENT col : columns) {
if (columns.contains(col)) {
sottoSbuf.append(';');
switch(col) {
case FULL_NAME:
sottoSbuf.append("'" + c.toString() + "'");
break;
case TEMPLATE_NAME:
sottoSbuf.append("'" + c.getTemplateName() + "'");
break;
case ACTIVATION:
sottoSbuf.append("'" + c.getBase() + "'");
break;
case TARGET:
sottoSbuf.append(c.getImplied() == null ? "" : "'" + c.getImplied() + "'");
break;
case SUPPORT:
sottoSbuf.append(String.format(Locale.ENGLISH, "%.3f", c.getSupport()));
break;
case CONFIDENCE_LEVEL:
sottoSbuf.append(String.format(Locale.ENGLISH, "%.3f", c.getConfidence()));
break;
case INTEREST_FACTOR:
sottoSbuf.append(String.format(Locale.ENGLISH, "%.3f", c.getInterestFactor()));
break;
default:
break;
}
}
}
superSbuf.append(sottoSbuf.substring(1));
superSbuf.append("\n");
sottoSbuf = new StringBuilder();
}
}
return superSbuf.toString();
}
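/*
 * Usage sketch (illustrative; `proMod` is assumed to be an available process model):
 *
 *   CsvEncoder encoder = new CsvEncoder();
 *   String csv = encoder.printAsCsv(
 *       java.util.EnumSet.of(PRINT_OUT_ELEMENT.FULL_NAME, PRINT_OUT_ELEMENT.SUPPORT),
 *       proMod);
 */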
}
| 2,677 | 28.108696 | 102 | java |
Janus | Janus-master/src/minerful/io/encdec/csv/package-info.java | /**
* @author Claudio Di Ciccio ([email protected])
* Utility classes for the print-out of declarative process models as CSV files.
*/
package minerful.io.encdec.csv; | 172 | 33.6 | 80 | java |
Janus | Janus-master/src/minerful/io/encdec/declaremap/DeclareMapEncoderDecoder.java | package minerful.io.encdec.declaremap;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
import org.processmining.plugins.declareminer.visualizing.ActivityDefinition;
import org.processmining.plugins.declareminer.visualizing.AssignmentModel;
import org.processmining.plugins.declareminer.visualizing.AssignmentModelView;
import org.processmining.plugins.declareminer.visualizing.ConstraintDefinition;
import org.processmining.plugins.declareminer.visualizing.ConstraintTemplate;
import org.processmining.plugins.declareminer.visualizing.DeclareMap;
import org.processmining.plugins.declareminer.visualizing.IItem;
import org.processmining.plugins.declareminer.visualizing.Language;
import org.processmining.plugins.declareminer.visualizing.LanguageGroup;
import org.processmining.plugins.declareminer.visualizing.Parameter;
import org.processmining.plugins.declareminer.visualizing.TemplateBroker;
import org.processmining.plugins.declareminer.visualizing.XMLBrokerFactory;
import com.jgraph.layout.JGraphFacade;
import com.jgraph.layout.organic.JGraphOrganicLayout;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharFactory;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.io.encdec.DeclareConstraintTransferObject;
import minerful.io.encdec.TransferObjectToConstraintTranslator;
import minerful.utils.ResourceReader;
public class DeclareMapEncoderDecoder {
private List<DeclareConstraintTransferObject> constraintTOs;
// private List<DeclareConstraintTransferObject> unmappedConstraintTOs;
private TaskCharArchive taskCharArchive = null;
private String processModelName = null;
public static final String
INTEREST_FACTOR_LABEL = "IF",
CONFIDENCE_LABEL = "confidence",
SUPPORT_LABEL = "support",
LABEL_VALUE_SEPARATOR = ";";
public static final String
INTEREST_FACTOR_EXTRACTION_REG_EXP =
".*" + INTEREST_FACTOR_LABEL + LABEL_VALUE_SEPARATOR + "([0-9\\.]+).*",
CONFIDENCE_EXTRACTION_REG_EXP =
".*" + CONFIDENCE_LABEL + LABEL_VALUE_SEPARATOR + "([0-9\\.]+).*",
SUPPORT_EXTRACTION_REG_EXP =
".*" + SUPPORT_LABEL + LABEL_VALUE_SEPARATOR + "([0-9\\.]+).*";
public static final Pattern
SUPPORT_PATTERN = Pattern.compile(SUPPORT_EXTRACTION_REG_EXP),
CONFIDENCE_PATTERN = Pattern.compile(CONFIDENCE_EXTRACTION_REG_EXP),
INTEREST_FACTOR_PATTERN = Pattern.compile(INTEREST_FACTOR_EXTRACTION_REG_EXP);
public static final String
SUPPORT_CONFIDENCE_IF_FORMAT_PATTERN =
SUPPORT_LABEL + LABEL_VALUE_SEPARATOR + "%f" + LABEL_VALUE_SEPARATOR
+ CONFIDENCE_LABEL + LABEL_VALUE_SEPARATOR + "%f" + LABEL_VALUE_SEPARATOR
+ INTEREST_FACTOR_LABEL + LABEL_VALUE_SEPARATOR + "%f";
public static final String TEMPLATE_TEMP_FILE_EXTENSION = ".xml";
public static final String TEMPLATE_TMP_FILE_BASENAME = "ConDecTemplate";
public static final String DECLARE_XML_TEMPLATE_LIBRARY_URL = "minerful/io/encdec/declaremap/";
public static final String DECLARE_XML_TEMPLATE = TEMPLATE_TMP_FILE_BASENAME + TEMPLATE_TEMP_FILE_EXTENSION;
public DeclareMapEncoderDecoder(ProcessModel process) {
this.constraintTOs = new ArrayList<DeclareConstraintTransferObject>(process.bag.howManyConstraints());
// this.unmappedConstraintTOs = new ArrayList<DeclareConstraintTransferObject>();
this.taskCharArchive = process.getTaskCharArchive();
this.processModelName = process.getName();
Collection<Constraint> auxConstraints = null;
DeclareConstraintTransferObject auxDeclareConstraintTO = null;
for (TaskChar tChar: process.bag.getTaskChars()) {
auxConstraints = process.bag.getConstraintsOf(tChar);
for (Constraint auxCon : auxConstraints) {
// Only the minimal content is saved: redundant, conflicting, or below-the-threshold constraints are not included in the output.
if (!auxCon.isMarkedForExclusion()) {
auxDeclareConstraintTO = new DeclareConstraintTransferObject(auxCon);
if (auxDeclareConstraintTO.declareMapTemplate != null) {
this.constraintTOs.add(auxDeclareConstraintTO);
}
}
}
}
}
public DeclareMapEncoderDecoder(String declareMapFilePath) {
AssignmentModel declareMapModel = DeclareMapReaderWriter.readFromFile(declareMapFilePath);
this.buildFromDeclareMapModel(declareMapModel);
}
public DeclareMapEncoderDecoder(AssignmentModel declareMapModel) {
this.buildFromDeclareMapModel(declareMapModel);
}
private void buildFromDeclareMapModel(AssignmentModel declareMapModel) {
/* Record the name of the process */
this.processModelName = declareMapModel.getName();
/* Create an archive of TaskChars out of the activity definitions in the Declare Map model */
Collection<TaskChar> tasksInDeclareMap = new ArrayList<TaskChar>(declareMapModel.activityDefinitionsCount());
TaskCharFactory tChFactory = new TaskCharFactory();
for (ActivityDefinition ad : declareMapModel.getActivityDefinitions()) {
tasksInDeclareMap.add(tChFactory.makeTaskChar(ad.getName()));
}
this.taskCharArchive = new TaskCharArchive(tasksInDeclareMap);
/* Create DTOs for constraints out of the definitions in the Declare Map model */
this.constraintTOs = new ArrayList<DeclareConstraintTransferObject>(declareMapModel.constraintDefinitionsCount());
for (ConstraintDefinition cd : declareMapModel.getConstraintDefinitions()) {
this.constraintTOs.add(new DeclareConstraintTransferObject(cd));
}
}
public ProcessModel createMinerFulProcessModel() {
Collection<Constraint> minerFulConstraints = new ArrayList<Constraint>(this.constraintTOs.size());
TransferObjectToConstraintTranslator miFuConMak = new TransferObjectToConstraintTranslator(this.taskCharArchive);
Constraint tmpCon = null;
for (DeclareConstraintTransferObject conTO: constraintTOs) {
tmpCon = miFuConMak.createConstraint(conTO);
if (tmpCon != null) {
minerFulConstraints.add(tmpCon);
}
}
MetaConstraintUtils.createHierarchicalLinks(new TreeSet<Constraint>(minerFulConstraints));
ConstraintsBag constraintsBag = new ConstraintsBag(this.taskCharArchive.getTaskChars(), minerFulConstraints);
return new ProcessModel(taskCharArchive, constraintsBag, this.processModelName);
}
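/*
 * Usage sketch (illustrative; the file path is hypothetical):
 *
 *   DeclareMapEncoderDecoder decoder =
 *       new DeclareMapEncoderDecoder("/path/to/declare-map.xml");
 *   ProcessModel model = decoder.createMinerFulProcessModel();
 */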
public List<DeclareConstraintTransferObject> getConstraintTOs() {
return constraintTOs;
}
public DeclareMap createDeclareMap() {
// return this.createDeclareMap(true);
// }
// public DeclareMap createDeclareMap(boolean addUnmappedDeclareMapConstraints) {
Map<String, DeclareMapTemplate> templateNameStringDeclareTemplateMap = new HashMap<String, DeclareMapTemplate>();
DeclareMapTemplate[] declareTemplates = DeclareMapTemplate.values();
for (DeclareMapTemplate d : declareTemplates) {
String templateNameString = d.getName();
templateNameStringDeclareTemplateMap.put(templateNameString, d);
}
Map<DeclareMapTemplate, ConstraintTemplate> declareTemplateDefinitionsMap = readConstraintTemplates(templateNameStringDeclareTemplateMap);
InputStream ir = loadConDecXmlTemplate();
File language = null;
try {
language = File.createTempFile(DeclareMapEncoderDecoder.TEMPLATE_TMP_FILE_BASENAME, DeclareMapEncoderDecoder.TEMPLATE_TEMP_FILE_EXTENSION);
BufferedReader br = new BufferedReader(new InputStreamReader(ir));
String line = br.readLine();
PrintStream out = new PrintStream(language);
while (line != null) {
out.println(line);
line = br.readLine();
}
out.flush();
out.close();
} catch (IOException e1) {
e1.printStackTrace();
}
TemplateBroker template = XMLBrokerFactory.newTemplateBroker(language.getAbsolutePath());
List<Language> languages = template.readLanguages();
Language lang = languages.get(0);
AssignmentModel model = new AssignmentModel(lang);
model.setName(this.processModelName);
ActivityDefinition activitydefinition = null;
ConstraintDefinition constraintDefinition = null;
int constraintID = 0;
/* Save activity definitions */
for (TaskChar tCh : this.taskCharArchive.getTaskChars()) {
activitydefinition = model.addActivityDefinition(tCh.identifier);
activitydefinition.setName(tCh.getName());
}
for (DeclareConstraintTransferObject constraintTo : constraintTOs) {
constraintID++;
constraintDefinition = createConstraintDefinition(
declareTemplateDefinitionsMap, model, constraintID, constraintTo);
model.addConstraintDefiniton(constraintDefinition);
}
// if (addUnmappedDeclareMapConstraints) {
// for (DeclareConstraintTransferObject constraintTo : unmappedConstraintTOs) {
//
// }
// }
AssignmentModelView view = new AssignmentModelView(model);
DeclareMap map = new DeclareMap(model, null, view, null, null, null);
final JGraphOrganicLayout oc = new JGraphOrganicLayout();
oc.setDeterministic(true);
oc.setOptimizeBorderLine(true);
oc.setOptimizeEdgeCrossing(true);
oc.setOptimizeEdgeDistance(true);
oc.setOptimizeEdgeLength(true);
oc.setOptimizeNodeDistribution(true);
oc.setEdgeCrossingCostFactor(999999999);
oc.setEdgeDistanceCostFactor(999999999);
oc.setFineTuning(true);
// oc.setMinDistanceLimit(0.001);
oc.setEdgeLengthCostFactor(9999);
if(map.getModel().getConstraintDefinitions().size()<200) {
oc.setEdgeLengthCostFactor(99);
}
oc.setNodeDistributionCostFactor(999999999);
oc.setBorderLineCostFactor(999);
oc.setRadiusScaleFactor(0.9);
final JGraphFacade jgf = new JGraphFacade(view.getGraph());
oc.run(jgf);
final Map<?, ?> nestedMap = jgf.createNestedMap(true, true);
view.getGraph().getGraphLayoutCache().edit(nestedMap);
return map;
}
private ConstraintDefinition createConstraintDefinition(
Map<DeclareMapTemplate, ConstraintTemplate> declareTemplateDefinitionsMap,
AssignmentModel model, int constraintID,
DeclareConstraintTransferObject constraintTo) {
/* Load constraint definition */
ConstraintDefinition constraintDefinition = new ConstraintDefinition(constraintID, model, declareTemplateDefinitionsMap.get(constraintTo.declareMapTemplate));
Collection<Parameter> parameters = (declareTemplateDefinitionsMap.get(constraintTo.declareMapTemplate)).getParameters();
Iterator<Set<String>> paramsIterator = constraintTo.parameters.iterator();
/* Fill in parameters */
for (Parameter parameter : parameters) {
for (String branchName : paramsIterator.next()) {
ActivityDefinition activityDefinition = model.activityDefinitionWithName(branchName);
constraintDefinition.addBranch(parameter, activityDefinition);
}
}
/* Specify the support, confidence and interest factor within the text */
constraintDefinition.setText(
constraintDefinition.getText() +
LABEL_VALUE_SEPARATOR +
String.format(SUPPORT_CONFIDENCE_IF_FORMAT_PATTERN,
constraintTo.support,
constraintTo.confidence,
constraintTo.interestFactor)
);
return constraintDefinition;
}
public static Map<DeclareMapTemplate, ConstraintTemplate> readConstraintTemplates(Map<String, DeclareMapTemplate> templateNameStringDeclareTemplateMap) {
InputStream templateInputStream = //ClassLoader.getSystemClassLoader().getResourceAsStream(DeclareMapEncoderDecoder.DECLARE_XML_TEMPLATE);
loadConDecXmlTemplate();
File languageFile = null;
try {
languageFile = File.createTempFile(DeclareMapEncoderDecoder.TEMPLATE_TMP_FILE_BASENAME, DeclareMapEncoderDecoder.TEMPLATE_TEMP_FILE_EXTENSION);
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(templateInputStream));
String line = bufferedReader.readLine();
PrintStream out = new PrintStream(languageFile);
while (line != null) {
out.println(line);
line = bufferedReader.readLine();
}
out.flush();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
TemplateBroker templateBroker = XMLBrokerFactory.newTemplateBroker(languageFile.getAbsolutePath());
List<Language> languagesList = templateBroker.readLanguages();
// The first language in the list is the ConDec language, which is what we need
Language condecLanguage = languagesList.get(0);
List<IItem> templateList = new ArrayList<IItem>();
List<IItem> condecLanguageChildrenList = condecLanguage.getChildren();
for (IItem condecLanguageChild : condecLanguageChildrenList) {
if (condecLanguageChild instanceof LanguageGroup) {
templateList.addAll(visit(condecLanguageChild));
} else {
templateList.add(condecLanguageChild);
}
}
Map<DeclareMapTemplate, ConstraintTemplate> declareTemplateConstraintTemplateMap = new HashMap<DeclareMapTemplate, ConstraintTemplate>();
for (IItem item : templateList) {
if(item instanceof ConstraintTemplate) {
ConstraintTemplate constraintTemplate = (ConstraintTemplate)item;
if(templateNameStringDeclareTemplateMap.containsKey(constraintTemplate.getName())) {
declareTemplateConstraintTemplateMap.put(templateNameStringDeclareTemplateMap.get(constraintTemplate.getName()), constraintTemplate);
} else {
}
}
}
return declareTemplateConstraintTemplateMap;
}
private static InputStream loadConDecXmlTemplate() {
return ResourceReader.loadResource(
DeclareMapEncoderDecoder.DECLARE_XML_TEMPLATE_LIBRARY_URL +
DeclareMapEncoderDecoder.DECLARE_XML_TEMPLATE);
}
private static List<IItem> visit(IItem item) {
List<IItem> templateList = new ArrayList<IItem>();
if (item instanceof LanguageGroup) {
LanguageGroup languageGroup = (LanguageGroup) item;
List<IItem> childrenList = languageGroup.getChildren();
for (IItem child : childrenList) {
if (child instanceof LanguageGroup) {
templateList.addAll(visit(child));
}else {
templateList.add(child);
}
}
}
return templateList;
}
} | 14,108 | 41.116418 | 160 | java |
Janus | Janus-master/src/minerful/io/encdec/declaremap/DeclareMapReaderWriter.java | package minerful.io.encdec.declaremap;
import java.io.File;
import org.processmining.plugins.declareminer.visualizing.AssignmentModel;
import org.processmining.plugins.declareminer.visualizing.AssignmentModelView;
import org.processmining.plugins.declareminer.visualizing.AssignmentViewBroker;
import org.processmining.plugins.declareminer.visualizing.DeclareMap;
import org.processmining.plugins.declareminer.visualizing.XMLBrokerFactory;
public class DeclareMapReaderWriter {
public static AssignmentModel readFromFile(String declareMapFilePath) {
File inputFile = new File(declareMapFilePath);
if (!inputFile.canRead() || !inputFile.isFile()) {
throw new IllegalArgumentException("Unreadable file: " + declareMapFilePath);
}
AssignmentViewBroker broker = XMLBrokerFactory.newAssignmentBroker(declareMapFilePath);
AssignmentModel model = broker.readAssignment();
AssignmentModelView view = new AssignmentModelView(model);
broker.readAssignmentGraphical(model, view);
return model;
}
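/*
 * Usage sketch (illustrative; the paths are hypothetical and `declareMap` is
 * assumed to be a DeclareMap built elsewhere, e.g., by DeclareMapEncoderDecoder):
 *
 *   AssignmentModel model = DeclareMapReaderWriter.readFromFile("/path/to/map.xml");
 *   DeclareMapReaderWriter.marshal("/path/to/out.xml", declareMap);
 */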
public static void marshal(String outfilePath, DeclareMap map) {
AssignmentViewBroker broker = XMLBrokerFactory.newAssignmentBroker(outfilePath);
broker.addAssignmentAndView(map.getModel(), map.getView());
}
}
| 1,233 | 37.5625 | 89 | java |
Janus | Janus-master/src/minerful/io/encdec/declaremap/DeclareMapTemplate.java | package minerful.io.encdec.declaremap;
import minerful.utils.MessagePrinter;
import org.apache.commons.lang3.text.WordUtils;
public enum DeclareMapTemplate {
Absence, Absence2, Absence3, Alternate_Precedence, Alternate_Response, Alternate_Succession,
Chain_Precedence, Chain_Response, Chain_Succession, Choice, CoExistence,
Exactly1, Exactly2, Exclusive_Choice, Existence, Existence2, Existence3,
Init,
Not_Chain_Succession, Not_CoExistence, Not_Succession,
Precedence, Response, Responded_Existence,
Succession,
Not_Chain_Precedence, Not_Chain_Response,
Not_Precedence, Not_Response,
Not_Responded_Existence,
BeforeThisOrLaterThat;
public String getName() {
switch(this) {
case CoExistence:
case Not_CoExistence:
return this.toString().replaceAll("_", " ").toLowerCase().replace("coexi", "co-exi");
default:
return this.toString().replaceAll("_", " ").toLowerCase();
}
}
public static DeclareMapTemplate fromName(String name) {
name = WordUtils.capitalizeFully(name).replaceAll(" ", "_");
if (name.contains("Co-exi")) {
name = name.replace("Co-exi", "CoExi");
}
DeclareMapTemplate mapTemplate = null;
try {
mapTemplate = DeclareMapTemplate.valueOf(name);
} catch (IllegalArgumentException e) {
MessagePrinter.printlnError("The " + name + " template is not yet defined in MINERful import library.");
}
return mapTemplate;
}
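/*
 * Round-trip sketch (illustrative) between template constants and their Declare names:
 *
 *   String name = DeclareMapTemplate.Alternate_Response.getName(); // "alternate response"
 *   DeclareMapTemplate template = DeclareMapTemplate.fromName(name); // Alternate_Response
 */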
} | 1,398 | 30.088889 | 107 | java |
Janus | Janus-master/src/minerful/io/encdec/declaremap/DeclareMapToMinerFulTemplatesTranslator.java | package minerful.io.encdec.declaremap;
import java.util.HashMap;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.existence.AtMostOne;
import minerful.concept.constraint.existence.End;
import minerful.concept.constraint.existence.ExactlyOne;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.existence.Absence;
import minerful.concept.constraint.nonDeclare.BeforeThisOrLaterThat;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.NotChainSuccession;
import minerful.concept.constraint.relation.NotCoExistence;
import minerful.concept.constraint.relation.NotSuccession;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.utils.MessagePrinter;
public class DeclareMapToMinerFulTemplatesTranslator {
public static HashMap<Class<? extends Constraint>, DeclareMapTemplate> MINERFUL_2_DECLARE_MAP =
new HashMap<Class<? extends Constraint>, DeclareMapTemplate>(
MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_EXISTENCE_CONSTRAINT_TEMPLATES + MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_RELATION_CONSTRAINT_TEMPLATES+MetaConstraintUtils.NUMBER_OF_NON_DECLARE_CONSTRAINT_TEMPLATES,
1.0F);
public static HashMap<DeclareMapTemplate, Class<? extends Constraint>> DECLARE_2_MINERFUL_MAP =
new HashMap<DeclareMapTemplate, Class<? extends Constraint>>(
MetaConstraintUtils.NUMBER_OF_EXISTENCE_CONSTRAINT_TEMPLATES + MetaConstraintUtils.NUMBER_OF_RELATION_CONSTRAINT_TEMPLATES+MetaConstraintUtils.NUMBER_OF_NON_DECLARE_CONSTRAINT_TEMPLATES,
1.0F);
// public static HashMap<String, Class<? extends Constraint>> STRING_2_MINERFUL_MAP =
static {
MINERFUL_2_DECLARE_MAP.put(Init.class, DeclareMapTemplate.Init);
MINERFUL_2_DECLARE_MAP.put(End.class, DeclareMapTemplate.Existence); // Declare does not cover the concept of "End"
MINERFUL_2_DECLARE_MAP.put(Participation.class, DeclareMapTemplate.Existence);
MINERFUL_2_DECLARE_MAP.put(AtMostOne.class, DeclareMapTemplate.Absence2);
MINERFUL_2_DECLARE_MAP.put(Absence.class, DeclareMapTemplate.Absence);
// Relation
MINERFUL_2_DECLARE_MAP.put(RespondedExistence.class, DeclareMapTemplate.Responded_Existence);
MINERFUL_2_DECLARE_MAP.put(Response.class, DeclareMapTemplate.Response);
MINERFUL_2_DECLARE_MAP.put(Precedence.class, DeclareMapTemplate.Precedence);
MINERFUL_2_DECLARE_MAP.put(AlternateResponse.class, DeclareMapTemplate.Alternate_Response);
MINERFUL_2_DECLARE_MAP.put(AlternatePrecedence.class, DeclareMapTemplate.Alternate_Precedence);
MINERFUL_2_DECLARE_MAP.put(ChainResponse.class, DeclareMapTemplate.Chain_Response);
MINERFUL_2_DECLARE_MAP.put(ChainPrecedence.class, DeclareMapTemplate.Chain_Precedence);
// Mutual relation
MINERFUL_2_DECLARE_MAP.put(CoExistence.class, DeclareMapTemplate.CoExistence);
MINERFUL_2_DECLARE_MAP.put(Succession.class, DeclareMapTemplate.Succession);
MINERFUL_2_DECLARE_MAP.put(AlternateSuccession.class, DeclareMapTemplate.Alternate_Succession);
MINERFUL_2_DECLARE_MAP.put(ChainSuccession.class, DeclareMapTemplate.Chain_Succession);
// Negation relation
MINERFUL_2_DECLARE_MAP.put(NotCoExistence.class, DeclareMapTemplate.Not_CoExistence);
MINERFUL_2_DECLARE_MAP.put(NotChainSuccession.class, DeclareMapTemplate.Not_Chain_Succession);
MINERFUL_2_DECLARE_MAP.put(NotSuccession.class, DeclareMapTemplate.Not_Succession);
// Non Declare relation
MINERFUL_2_DECLARE_MAP.put(BeforeThisOrLaterThat.class, DeclareMapTemplate.BeforeThisOrLaterThat);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Init, Init.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Existence, Participation.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Absence2, AtMostOne.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Exactly1, ExactlyOne.class);
// Relation
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Responded_Existence, RespondedExistence.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Response, Response.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Precedence, Precedence.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Alternate_Response, AlternateResponse.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Alternate_Precedence, AlternatePrecedence.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Chain_Response, ChainResponse.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Chain_Precedence, ChainPrecedence.class);
// Mutual relation
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.CoExistence, CoExistence.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Succession, Succession.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Alternate_Succession, AlternateSuccession.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Chain_Succession, ChainSuccession.class);
// Negation relation
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Not_CoExistence, NotCoExistence.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Not_Chain_Succession, NotChainSuccession.class);
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.Not_Succession, NotSuccession.class);
// Non Declare relation
DECLARE_2_MINERFUL_MAP.put(DeclareMapTemplate.BeforeThisOrLaterThat, BeforeThisOrLaterThat.class);
}
public static DeclareMapTemplate translateTemplateName(Class<? extends Constraint> constraintClass) {
if (MINERFUL_2_DECLARE_MAP.containsKey(constraintClass)) {
return MINERFUL_2_DECLARE_MAP.get(constraintClass);
}
return null;
}
public static Class<? extends Constraint> translateTemplateName(DeclareMapTemplate declareMapTemplate) {
if (DECLARE_2_MINERFUL_MAP.containsKey(declareMapTemplate)) {
return DECLARE_2_MINERFUL_MAP.get(declareMapTemplate);
} else {
MessagePrinter.getInstance(DeclareMapEncoderDecoder.class).warn("Unmapped native Declare Map template: " + declareMapTemplate);
return null;
}
}
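/*
 * Usage sketch (illustrative) of the mapping in both directions:
 *
 *   DeclareMapTemplate t =
 *       DeclareMapToMinerFulTemplatesTranslator.translateTemplateName(Response.class);
 *   Class<? extends Constraint> c =
 *       DeclareMapToMinerFulTemplatesTranslator.translateTemplateName(DeclareMapTemplate.Response);
 */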
} | 6,565 | 58.690909 | 217 | java |
Janus | Janus-master/src/minerful/io/encdec/json/JsonPojoEncoderDecoder.java | package minerful.io.encdec.json;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.PrintWriter;
import java.util.Map;
import minerful.concept.constraint.Constraint;
import minerful.io.encdec.pojo.ConstraintPojo;
import minerful.io.encdec.pojo.ProcessModelPojo;
import com.google.gson.Gson;
import com.google.gson.JsonIOException;
import com.google.gson.JsonSyntaxException;
public class JsonPojoEncoderDecoder {
private Gson gson;
public JsonPojoEncoderDecoder() {
this.gson = new Gson();
}
public ConstraintPojo fromJsonToConstraintPojo(String json) {
return this.gson.fromJson(json, ConstraintPojo.class);
}
public ConstraintPojo fromJsonToConstraintPojo(File jsonFile) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
return this.gson.fromJson(new FileReader(jsonFile), ConstraintPojo.class);
}
public String fromConstraintPojoToJson(ConstraintPojo pojo) {
return this.gson.toJson(pojo);
}
public void saveConstraintPojo(ConstraintPojo pojo, File jsonFile) throws JsonIOException, FileNotFoundException {
PrintWriter priWri = new PrintWriter(jsonFile);
this.gson.toJson(pojo, priWri);
priWri.flush();
priWri.close();
}
public ProcessModelPojo fromJsonToProcessModelPojo(String json) {
return this.gson.fromJson(json, ProcessModelPojo.class);
}
public ProcessModelPojo fromJsonToProcessModelPojo(File jsonFile) throws JsonSyntaxException, JsonIOException, FileNotFoundException {
return this.gson.fromJson(new FileReader(jsonFile), ProcessModelPojo.class);
}
public String fromProcessModelPojoToJson(ProcessModelPojo pojo) {
return this.gson.toJson(pojo);
}
public void saveProcessModelPojo(ProcessModelPojo pojo, File jsonFile) throws JsonIOException, FileNotFoundException {
PrintWriter priWri = new PrintWriter(jsonFile);
this.gson.toJson(pojo, priWri);
priWri.flush();
priWri.close();
}
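/*
 * Usage sketch (illustrative; `pojo` is assumed to be a populated ProcessModelPojo
 * and the file path is hypothetical):
 *
 *   JsonPojoEncoderDecoder codec = new JsonPojoEncoderDecoder();
 *   String json = codec.fromProcessModelPojoToJson(pojo);
 *   ProcessModelPojo readBack = codec.fromJsonToProcessModelPojo(json);
 *   codec.saveProcessModelPojo(pojo, new File("/tmp/model.json"));
 */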
} | 1,924 | 33.375 | 135 | java |
Janus | Janus-master/src/minerful/io/encdec/log/IOutEncoder.java | package minerful.io.encdec.log;
import java.io.File;
import java.io.IOException;
public interface IOutEncoder {
public abstract void setTraces(String[] traces);
public abstract File encodeToFile(File outFile) throws IOException;
public abstract String encodeToString() throws IOException;
} | 299 | 20.428571 | 68 | java |
Janus | Janus-master/src/minerful/io/encdec/log/MxmlEncoder.java | package minerful.io.encdec.log;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.deckfour.xes.out.XMxmlSerializer;
public class MxmlEncoder extends XesEncoder {
public MxmlEncoder(String[] traces) {
super(traces);
}
@Override
public File encodeToFile(File outFile) throws IOException {
OutputStream outStream = new FileOutputStream(outFile);
new XMxmlSerializer().serialize(this.xLog, outStream);
return outFile;
}
@Override
public String encodeToString() throws IOException {
OutputStream outStream = new ByteArrayOutputStream();
new XMxmlSerializer().serialize(this.xLog, outStream);
return outStream.toString();
}
} | 757 | 24.266667 | 60 | java |
Janus | Janus-master/src/minerful/io/encdec/log/TaskTracesFromStringsLog.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.io.encdec.log;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.log4j.Logger;
public class TaskTracesFromStringsLog {
private static Logger logger;
protected File stringsLogFile;
private Set<Character> alphabet;
public TaskTracesFromStringsLog(File stringsLogFile) throws Exception {
if (logger == null) {
logger = Logger.getLogger(this.getClass().getCanonicalName());
}
if (!stringsLogFile.canRead()) {
throw new IllegalArgumentException("Unparsable log file: " + stringsLogFile.getAbsolutePath());
}
this.stringsLogFile = stringsLogFile;
alphabet = new TreeSet<Character>();
}
public String[] extractTraces() throws Exception {
List<String> traces = new ArrayList<String>();
FileInputStream fstream;
fstream = new FileInputStream(this.stringsLogFile);
DataInputStream in = new DataInputStream(fstream);
BufferedReader br = new BufferedReader(new InputStreamReader(in));
String strLine = br.readLine();
while (strLine != null) {
strLine = strLine.trim();
traces.add(strLine);
for (char c : strLine.toCharArray()) {
alphabet.add(c);
}
strLine = br.readLine();
}
in.close();
return traces.toArray(new String[traces.size()]);
}
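/*
 * Usage sketch (illustrative; the path is hypothetical):
 *
 *   TaskTracesFromStringsLog reader =
 *       new TaskTracesFromStringsLog(new File("/path/to/strings.log"));
 *   String[] traces = reader.extractTraces();
 *   Character[] alphabet = reader.getAlphabet();
 */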
public Character[] getAlphabet() {
return alphabet.toArray(new Character[alphabet.size()]);
}
}
| 1,859 | 28.52381 | 104 | java |
Janus | Janus-master/src/minerful/io/encdec/log/XesEncoder.java | package minerful.io.encdec.log;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import org.deckfour.xes.classification.XEventNameClassifier;
import org.deckfour.xes.extension.std.XConceptExtension;
import org.deckfour.xes.extension.std.XLifecycleExtension;
import org.deckfour.xes.extension.std.XTimeExtension;
import org.deckfour.xes.factory.XFactory;
import org.deckfour.xes.factory.XFactoryBufferedImpl;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
import org.deckfour.xes.out.XesXmlSerializer;
public class XesEncoder implements IOutEncoder {
protected XLog xLog = null;
public XesEncoder(String[] traces) {
this.xLog = this.encode(traces);
}
@Override
public void setTraces(String[] traces) {
this.encode(traces);
}
private XLog encode(String[] traces) {
XFactory xFactory = new XFactoryBufferedImpl();
XLog xLog = xFactory.createLog();
XTrace xTrace = null;
XEvent xEvent = null;
XConceptExtension concExtino = XConceptExtension.instance();
XLifecycleExtension lifeExtension = XLifecycleExtension.instance();
XTimeExtension timeExtension = XTimeExtension.instance();
xLog.getExtensions().add(concExtino);
xLog.getExtensions().add(lifeExtension);
xLog.getExtensions().add(timeExtension);
xLog.getClassifiers().add(new XEventNameClassifier());
concExtino.assignName(xLog, "Synthetic log");
lifeExtension.assignModel(xLog, XLifecycleExtension.VALUE_MODEL_STANDARD);
int tracesCounter = 1;
Date currentDate = null;
for (String trace : traces) {
xTrace = xFactory.createTrace();
int padder = (int)(Math.ceil(Math.log10(traces.length)));
concExtino.assignName(
xTrace,
String.format("Synthetic trace no. " +
(padder < 1 ? "" : "%0" + padder) + "d", (tracesCounter++)
)
);
if (trace.length() > 0) {
for (Character charTask : trace.toCharArray()) {
xEvent = xFactory.createEvent();
concExtino.assignName(xEvent, charTask.toString());
lifeExtension.assignStandardTransition(xEvent, XLifecycleExtension.StandardModel.COMPLETE);
currentDate = generateRandomDateTimeForLogEvent(currentDate);
timeExtension.assignTimestamp(xEvent, currentDate);
xTrace.add(xEvent);
}
}
xLog.add(xTrace);
}
return xLog;
}
private Date generateRandomDateTimeForLogEvent() {
return generateRandomDateTimeForLogEvent(null);
}
private Date generateRandomDateTimeForLogEvent(Date laterThan) {
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
if (laterThan == null) {
cal.add(GregorianCalendar.YEAR, -1);
cal.add(GregorianCalendar.MONTH, (int) ( Math.ceil(Math.random() * 12 ) * -1 ) );
cal.add(GregorianCalendar.WEEK_OF_MONTH, (int) ( Math.ceil(Math.random() * 4 ) * -1 ) );
cal.add(GregorianCalendar.DAY_OF_WEEK, (int) ( Math.ceil(Math.random() * 7 ) * -1 ) );
laterThan = cal.getTime();
}
long
randomAdditionalTime = (long) (Math.ceil(Math.random() * TimeUnit.DAYS.toMillis(1)));
cal.setTimeInMillis(laterThan.getTime() + randomAdditionalTime);
return cal.getTime();
}
@Override
public File encodeToFile(File outFile) throws IOException {
OutputStream outStream = new FileOutputStream(outFile);
new XesXmlSerializer().serialize(this.xLog, outStream);
return outFile;
}
@Override
public String encodeToString() throws IOException {
OutputStream outStream = new ByteArrayOutputStream();
new XesXmlSerializer().serialize(this.xLog, outStream);
return outStream.toString();
}
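/*
 * Usage sketch (illustrative; the traces and output path are hypothetical):
 *
 *   XesEncoder encoder = new XesEncoder(new String[]{"abcb", "acb"});
 *   encoder.encodeToFile(new File("/tmp/synthetic-log.xes"));
 */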
} | 3,808 | 31.279661 | 96 | java |
Janus | Janus-master/src/minerful/io/encdec/pojo/ConstraintPojo.java | package minerful.io.encdec.pojo;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
public class ConstraintPojo implements Comparable<ConstraintPojo> {
public String template;
public List<Set<String>> parameters;
public Double support;
public Double confidence;
public Double interestFactor;
public ConstraintPojo() {
this.parameters = new ArrayList<Set<String>>();
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("DeclareConstraintPojo [template=");
builder.append(template);
builder.append(", parameters=");
builder.append(parameters);
builder.append(", support=");
builder.append(support);
builder.append(", confidence=");
builder.append(confidence);
builder.append(", interestFactor=");
builder.append(interestFactor);
builder.append("]");
return builder.toString();
}
@Override
public int compareTo(ConstraintPojo o) {
int result = 0;
/* Compare the template name */
if (this.template == null) {
if (o.template != null) {
return 1;
}
} else {
if (o.template == null) {
return -1;
}
//if (this.template == "End" && o.template == "Participation")
//System.err.println(this.template + " vs " + o.template + " = " + this.template.compareTo(o.template));
result = this.template.compareTo(o.template);
}
/* Compare the number of parameters */
if (result == 0) {
if (this.parameters == null) {
if (o.parameters != null) {
return 1;
}
} else {
if (o.parameters == null) {
return -1;
}
}
result = new Integer(this.parameters.size()).compareTo(o.parameters.size());
}
if (result == 0) {
/* Compare the parameters' sizes */
for (int i = 0; i < this.parameters.size() && result == 0; i++) {
if (this.parameters.get(i) == null) {
if (o.parameters.get(i) != null) {
return 1;
}
} else {
if (o.parameters.get(i) == null) {
return -1;
}
}
result = new Integer(this.parameters.get(i).size()).compareTo(o.parameters.get(i).size());
/* Compare the respective parameters' tasks */
if (result == 0) {
Iterator<String>
thisParamsIterator = this.parameters.get(i).iterator(),
oParamsIterator = o.parameters.get(i).iterator();
while (thisParamsIterator.hasNext() && result == 0) {
result = thisParamsIterator.next().compareTo(oParamsIterator.next());
}
}
}
}
return result;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((parameters == null) ? 0 : parameters.hashCode());
result = prime * result
+ ((template == null) ? 0 : template.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
return (this.compareTo((ConstraintPojo)obj) == 0);
}
} | 3,048 | 25.745614 | 104 | java |
Janus | Janus-master/src/minerful/io/encdec/pojo/ProcessModelPojo.java | package minerful.io.encdec.pojo;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
public class ProcessModelPojo {
public String name;
public Set<String> tasks;
public Set<ConstraintPojo> constraints;
public ProcessModelPojo() {
this.tasks = new TreeSet<String>();
this.constraints = new TreeSet<ConstraintPojo>();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((constraints == null) ? 0 : constraints.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((tasks == null) ? 0 : tasks.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ProcessModelPojo other = (ProcessModelPojo) obj;
if (constraints == null) {
if (other.constraints != null)
return false;
} else {
if (constraints.size() != other.constraints.size()) {
return false;
}
Iterator<ConstraintPojo>
thisCnsIt = constraints.iterator(),
otherCnsIt = other.constraints.iterator();
while (thisCnsIt.hasNext() && otherCnsIt.hasNext()) {
if (!thisCnsIt.next().equals(otherCnsIt.next())) {
return false;
}
}
}
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
if (tasks == null) {
if (other.tasks != null)
return false;
} else {
if (tasks.size() != other.tasks.size()) {
return false;
}
Iterator<String>
thisCnsIt = tasks.iterator(),
otherCnsIt = other.tasks.iterator();
while (thisCnsIt.hasNext() && otherCnsIt.hasNext()) {
if (!thisCnsIt.next().equals(otherCnsIt.next())) {
return false;
}
}
}
return true;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("ProcessModelPojo [name=");
builder.append(name);
builder.append(", tasks=");
builder.append(tasks);
builder.append(", constraints=");
builder.append(constraints);
builder.append("]");
return builder.toString();
}
} | 2,199 | 23.719101 | 69 | java |
Janus | Janus-master/src/minerful/io/params/InputModelParameters.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.io.params;
import java.io.File;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class InputModelParameters extends ParamsManager {
public static final String INPUT_MODELFILE_PATH_PARAM_NAME = "iMF";
public static final String INPUT_MODELFILE_PATH_PARAM_LONG_NAME = "input-model-file";
public static final String INPUT_MODEL_ENC_PARAM_NAME = "iME";
public static final String INPUT_MODEL_ENC_PARAM_LONG_NAME = "input-model-encoding";
public static final InputEncoding DEFAULT_INPUT_MODEL_ENC = InputEncoding.MINERFUL;
/**
* Possible file encodings for marshalled process models.
* @author Claudio Di Ciccio
*/
public enum InputEncoding {
DECLARE_MAP,
MINERFUL, // default
JSON
}
/**
* Input model encoding language (see {@link InputEncoding InputEncoding}). Default value is {@link #DEFAULT_INPUT_MODEL_ENC DEFAULT_INPUT_MODEL_ENC}.
*/
public InputEncoding inputLanguage;
/**
* File in which the process model is stored.
*/
public File inputFile;
public InputModelParameters() {
super();
inputLanguage = InputModelParameters.DEFAULT_INPUT_MODEL_ENC;
inputFile = null;
}
public InputModelParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public InputModelParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.inputFile = openInputFile(line, INPUT_MODELFILE_PATH_PARAM_NAME);
this.inputLanguage = InputEncoding.valueOf(
fromStringToEnumValue(line.getOptionValue(INPUT_MODEL_ENC_PARAM_NAME, this.inputLanguage.toString())
)
);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(INPUT_MODEL_ENC_PARAM_NAME)
.hasArg().argName("language")
.longOpt(INPUT_MODEL_ENC_PARAM_LONG_NAME)
.desc("input model encoding language " + printValues(InputEncoding.values()) + " (default: " + printValues(DEFAULT_INPUT_MODEL_ENC) + ")")
.type(String.class)
.build()
// .create(INPUT_MODEL_ENC_PARAM_NAME)
);
options.addOption(
Option.builder(INPUT_MODELFILE_PATH_PARAM_NAME)
.hasArg().argName("path")
.longOpt(INPUT_MODELFILE_PATH_PARAM_LONG_NAME)
.desc("path of the file from which the process model should be read")
.type(String.class)
.build()
// .create(INPUT_MODELFILE_PATH_PARAM_NAME)
);
return options;
}
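/*
 * Command-line sketch (illustrative; the path is hypothetical). When these options
 * are parsed by a launcher, an input model can be specified as
 *
 *   -iMF /path/to/model.xml -iME DECLARE_MAP
 *
 * or, using the long option names,
 *
 *   --input-model-file /path/to/model.xml --input-model-encoding DECLARE_MAP
 */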
} | 3,373 | 31.442308 | 171 | java |
Janus | Janus-master/src/minerful/io/params/OutputModelParameters.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.io.params;
import java.io.File;
import minerful.io.encdec.csv.CsvEncoder;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class OutputModelParameters extends ParamsManager {
public static final String SAVE_AS_CONDEC_PARAM_NAME = "oConDec";
public static final String SAVE_AS_CSV_PARAM_NAME = "oCSV";
public static final String SAVE_AS_XML_PARAM_NAME = "oXML";
public static final String SAVE_AS_JSON_PARAM_NAME = "oJSON";
public static final String SAVE_PROCESS_DOT_AUTOMATON_PARAM_NAME = "autoDOT";
// public static final String SAVE_PROCESS_CONDENSED_DOT_AUTOMATON_PARAM_NAME = "dotCond"; // TODO To be done, one day
public static final String SAVE_PROCESS_TSML_AUTOMATON_PARAM_NAME = "autoTSML";
public static final String FOLDER_FOR_SAVING_DOT_SUBAUTOMATA_PARAM_NAME = "subautosDOT";
public static final String SAVE_XML_WEIGHTED_AUTOMATON_PARAM_NAME = "autoReplayXML";
public static final String SAVE_SKIMMED_XML_WEIGHTED_AUTOMATON_PARAM_NAME = "autoReplayTrimXML";
public static final String FOLDER_FOR_SAVING_XML_WEIGHTED_SUBAUTOMATA_PARAM_NAME = "subautosReplayXML";
public static final String ENCODE_OUTPUT_TASKS_FLAG = "encodeTasksFlag";
/** File in which discovered constraints are printed in CSV format. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveConstraintsAsCSV;
/** Directory in which the discovered constraints are printed as automata, in separate GraphViz DOT files. Keep it equal to <code>null</code> for avoiding such print-outs. */
public File folderToSaveDotFilesForPartialAutomata;
/** File in which the discovered process model is printed as a TSML representation of an automaton. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveTsmlFileForAutomaton;
/** File in which the discovered process model is printed as a GraphViz DOT of an automaton. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveDotFileForAutomaton;
// /** File in which the discovered process model is printed as a GraphViz DOT of an automaton in which multiple transitions are collapsed into one with many labels, for readability reasons. Keep it equal to <code>null</code> for avoiding such print-out. */
// public File fileToSaveDotFileForCondensedAutomaton; // TODO One day
/** File in which the discovered process model is saved as a Declare XML file. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveAsConDec;
/** File in which the discovered process model is printed as an XML representation of an automaton. Transitions are weighted by the number of times the replay of the traces in the event log traverses them. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveXmlFileForAutomaton;
/** File in which the discovered process model is printed as an XML representation of an automaton. Transitions are weighted by the number of times the replay of the traces in the event log traverses them. Transitions that are never traversed are removed. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveSkimmedXmlFileForAutomaton;
/** Directory in which the discovered constraints are printed as automata, in separate XML files. Keep it equal to <code>null</code> for avoiding such print-outs. */
public File folderToSaveXmlFilesForPartialAutomata;
/** File in which the discovered process model is saved as an XML file. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveAsXML;
/** File in which the discovered process model is saved as a JSON file. Keep it equal to <code>null</code> for avoiding such print-out. */
public File fileToSaveAsJSON;
/** Columns to be printed if constraints are printed in CSV format. Notice that this attribute is not associated to a command-line parameter. */
public CsvEncoder.PRINT_OUT_ELEMENT[] csvColumnsToPrint = CsvEncoder.PRINT_OUT_ELEMENT.values();
/** Flag specifying whether the output tasks/events should be encoded (e.g., A B C D E ...) or kept with their original names as in the log. */
public boolean encodeOutputTasks;
public OutputModelParameters() {
this.fileToSaveConstraintsAsCSV = null;
this.folderToSaveDotFilesForPartialAutomata = null;
this.fileToSaveTsmlFileForAutomaton = null;
this.fileToSaveDotFileForAutomaton = null;
// this.fileToSaveDotFileForCondensedAutomaton = null; // TODO One day
this.fileToSaveAsConDec = null;
this.fileToSaveXmlFileForAutomaton = null;
this.fileToSaveSkimmedXmlFileForAutomaton = null;
this.folderToSaveXmlFilesForPartialAutomata = null;
this.fileToSaveAsXML = null;
this.fileToSaveAsJSON = null;
this.encodeOutputTasks = false;
}
public OutputModelParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public OutputModelParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.fileToSaveAsXML = openOutputFile(line, SAVE_AS_XML_PARAM_NAME);
this.fileToSaveAsJSON = openOutputFile(line, SAVE_AS_JSON_PARAM_NAME);
this.folderToSaveDotFilesForPartialAutomata = openOutputDir(line, FOLDER_FOR_SAVING_DOT_SUBAUTOMATA_PARAM_NAME);
this.fileToSaveDotFileForAutomaton = openOutputFile(line, SAVE_PROCESS_DOT_AUTOMATON_PARAM_NAME);
// this.fileToSaveDotFileForCondensedAutomaton = openOutputFile(line, SAVE_PROCESS_CONDENSED_DOT_AUTOMATON_PARAM_NAME); // TODO One day
this.fileToSaveTsmlFileForAutomaton = openOutputFile(line, SAVE_PROCESS_TSML_AUTOMATON_PARAM_NAME);
this.fileToSaveConstraintsAsCSV = openOutputFile(line, SAVE_AS_CSV_PARAM_NAME);
this.fileToSaveAsConDec = openOutputFile(line, SAVE_AS_CONDEC_PARAM_NAME);
this.fileToSaveXmlFileForAutomaton = openOutputFile(line, SAVE_XML_WEIGHTED_AUTOMATON_PARAM_NAME);
this.fileToSaveSkimmedXmlFileForAutomaton = openOutputFile(line, SAVE_SKIMMED_XML_WEIGHTED_AUTOMATON_PARAM_NAME);
this.folderToSaveXmlFilesForPartialAutomata = openOutputDir(line, FOLDER_FOR_SAVING_XML_WEIGHTED_SUBAUTOMATA_PARAM_NAME);
this.encodeOutputTasks = line.hasOption(ENCODE_OUTPUT_TASKS_FLAG);
}
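/*
 * Command-line sketch (illustrative; file paths are hypothetical) for some of the
 * options declared below:
 *
 *   -oCSV constraints.csv -oJSON model.json -autoDOT automaton.dot -encodeTasksFlag
 */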
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(SAVE_AS_XML_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-as-xml")
.desc("path of the file in which to save the discovered process model as XML")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_AS_JSON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-as-json")
.desc("path of the file in which to save the discovered process model as JSON")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_PROCESS_DOT_AUTOMATON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-automaton-dot")
.desc(
"write a Graphviz DOT format of a finite state automaton representing the declarative process"
)
.type(String.class)
.build()
);
// options.addOption( // TODO One day
// Option.builder(SAVE_PROCESS_CONDENSED_DOT_AUTOMATON_PARAM_NAME)
// .hasArg().argName("path")
// .longOpt("save-cond-automaton-dot")
// .desc(
// "write a Graphviz DOT format of a condensed, more readable finite state automaton representing the declarative process"
// )
// .type(String.class)
// .create(SAVE_PROCESS_CONDENSED_DOT_AUTOMATON_PARAM_NAME)
// );
options.addOption(
Option.builder(SAVE_PROCESS_TSML_AUTOMATON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-automaton-tsml")
.desc(
"write a TSML format of a finite state automaton representing the mined process on the given file"
)
.type(String.class)
.build()
);
options.addOption(
Option.builder(FOLDER_FOR_SAVING_DOT_SUBAUTOMATA_PARAM_NAME)
.hasArg().argName("path")
.longOpt("subautom-folder")
.desc("write the Graphviz DOT format of activities' finite state sub-automata on separate files, within the given folder")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_AS_CSV_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-as-csv")
.desc("print results in CSV format into the specified file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_AS_CONDEC_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-as-condec")
.desc("print the discovered process as a Declare map (ConDec) into the specified file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_XML_WEIGHTED_AUTOMATON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("print-replay-autom")
.desc(
attachInstabilityWarningToDescription("print the discovered process in weighted automaton XML format, into the specified file. The weight is computed based on the number of times the event log replay traverses the transition.")
)
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_SKIMMED_XML_WEIGHTED_AUTOMATON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("print-replay-trim-autom")
.desc(
attachInstabilityWarningToDescription("print the discovered process in weighted automaton XML format, into the specified file. Remove the transitions (and states) that have weight 0. The weight is computed based on the number of times the event log replay traverses the transition.")
)
.type(String.class)
.build()
);
options.addOption(
Option.builder(FOLDER_FOR_SAVING_XML_WEIGHTED_SUBAUTOMATA_PARAM_NAME)
.hasArg().argName("path")
.longOpt("xml-subautom-folder")
.desc(
attachInstabilityWarningToDescription("write the weighted automaton XML format of activities' finite state sub-automata on separate files, within the given folder"))
.type(String.class)
.build()
);
options.addOption(
Option.builder(ENCODE_OUTPUT_TASKS_FLAG)
.longOpt("flag-encoding-tasks")
.desc("Flag if the output tasks/events should be encoded")
.type(Boolean.class)
.build()
);
return options;
}
} | 11,516 | 46.9875 | 323 | java |
Janus | Janus-master/src/minerful/logmaker/MinerFulLogMaker.java | package minerful.logmaker;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import minerful.automaton.AutomatonRandomWalker;
import minerful.automaton.utils.AutomatonUtils;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.logmaker.params.LogMakerParameters;
import minerful.utils.MessagePrinter;
import org.deckfour.xes.classification.XEventNameClassifier;
import org.deckfour.xes.extension.std.XConceptExtension;
import org.deckfour.xes.extension.std.XLifecycleExtension;
import org.deckfour.xes.extension.std.XTimeExtension;
import org.deckfour.xes.factory.XFactory;
import org.deckfour.xes.factory.XFactoryBufferedImpl;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
import org.deckfour.xes.out.XMxmlSerializer;
import org.deckfour.xes.out.XesXmlSerializer;
import dk.brics.automaton.Automaton;
/**
* Generates a log out of a MINERful declarative process model.
* @author Claudio Di Ciccio
*/
public class MinerFulLogMaker {
/**
* Log generation parameters
*/
private LogMakerParameters parameters;
/**
* Event log
*/
private XLog log;
/**
* Event log as strings
*/
private String[] stringsLog;
/**
* Maximum number of traces to be kept in the string-based representation of the log
*/
public static int MAX_SIZE_OF_STRINGS_LOG = Integer.MAX_VALUE;
/**
* For debugging purposes
*/
public static MessagePrinter logger = MessagePrinter.getInstance(MinerFulLogMaker.class);
public MinerFulLogMaker(LogMakerParameters parameters) throws IllegalArgumentException {
this.setParameters(parameters);
}
public void setParameters(LogMakerParameters parameters) {
String errors = parameters.checkValidity();
if (errors != null)
throw new IllegalArgumentException(errors);
this.parameters = parameters;
this.stringsLog = new String[(parameters.tracesInLog < MAX_SIZE_OF_STRINGS_LOG ?
Integer.parseInt(String.valueOf(parameters.tracesInLog)) :
MAX_SIZE_OF_STRINGS_LOG)];
}
/**
* Generates an event log based on a MINERful process model. To do so, it
* extracts an automaton out of the declarative process model. Every finite
* random walk on it generates a trace. Every trace is included in the
* returned event log. The minimum and maximum length of the trace, as well
* as the number of traces to be generated, are specified in
* {@link #parameters parameters}.
* @param processModel The process model that the generated event log complies to
* @return The generated event log
*/
public XLog createLog(ProcessModel processModel) {
XFactory xFactory = new XFactoryBufferedImpl();
this.log = xFactory.createLog();
XTrace xTrace = null;
XEvent xEvent = null;
XConceptExtension concExtino = XConceptExtension.instance();
XLifecycleExtension lifeExtension = XLifecycleExtension.instance();
XTimeExtension timeExtension = XTimeExtension.instance();
this.log.getExtensions().add(concExtino);
this.log.getExtensions().add(lifeExtension);
this.log.getExtensions().add(timeExtension);
this.log.getClassifiers().add(new XEventNameClassifier());
concExtino.assignName(this.log,
"Synthetic log for process: " + processModel.getName()
);
lifeExtension.assignModel(this.log, XLifecycleExtension.VALUE_MODEL_STANDARD);
Automaton automaton = processModel.buildAutomaton();
automaton = AutomatonUtils.limitRunLength(automaton, this.parameters.minEventsPerTrace, this.parameters.maxEventsPerTrace);
AutomatonRandomWalker walker = new AutomatonRandomWalker(automaton);
TaskChar firedTransition = null;
Character pickedTransitionChar = 0;
Date currentDate = null;
int padder = (int)(Math.ceil(Math.log10(this.parameters.tracesInLog)));
String traceNameTemplate = "Synthetic trace no. " + (padder < 1 ? "" : "%0" + padder) + "d";
StringBuffer sBuf = new StringBuffer();
for (int traceNum = 0; traceNum < this.parameters.tracesInLog; traceNum++) {
sBuf.append("<");
walker.goToStart();
xTrace = xFactory.createTrace();
concExtino.assignName(
xTrace,
String.format(traceNameTemplate, (traceNum))
);
pickedTransitionChar = walker.walkOn();
while (pickedTransitionChar != null) {
firedTransition = processModel.getTaskCharArchive().getTaskChar(pickedTransitionChar);
if (traceNum < MAX_SIZE_OF_STRINGS_LOG) {
sBuf.append(firedTransition + ",");
}
currentDate = generateRandomDateTimeForLogEvent(currentDate);
xEvent = makeXEvent(xFactory, concExtino, lifeExtension, timeExtension, firedTransition, currentDate);
xTrace.add(xEvent);
pickedTransitionChar = walker.walkOn();
}
this.log.add(xTrace);
if (traceNum < MAX_SIZE_OF_STRINGS_LOG) {
this.stringsLog[traceNum] = sBuf.substring(0, Math.max(1, sBuf.length() -1)) + ">";
sBuf = new StringBuffer();
}
}
return this.log;
}
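	/*
	 * Minimal usage sketch for this class (illustrative only: how the LogMakerParameters
	 * instance `params` and the ProcessModel instance `processModel` are obtained depends
	 * on the caller and is assumed here):
	 *
	 *   MinerFulLogMaker logMaker = new MinerFulLogMaker(params);
	 *   XLog log = logMaker.createLog(processModel); // random walks on the model's automaton
	 *   logMaker.storeLog();                         // writes to params.outputLogFile, encoded as params.outputEncoding
	 *
	 * Alternatively, printEncodedLog() returns the encoded event log as a String instead of
	 * writing it to a file.
	 */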
/**
* Stores the generated event log, {@link #log log}, in the file specified in
* {@link #parameters parameters}.
* @return The file in which the event log has been stored
* @throws IOException
*/
public File storeLog() throws IOException {
checkParametersForLogEncoding();
if (this.parameters.outputLogFile == null)
throw new IllegalStateException("Output file not specified in given parameters");
File outFile = this.parameters.outputLogFile;
OutputStream outStream = new FileOutputStream(outFile);
this.printEncodedLogInStream(outStream);
outStream.flush();
outStream.close();
return outFile;
}
/**
* Prints the generated event log, {@link #log log}.
* @return The print-out of the event log
* @throws IOException
*/
public String printEncodedLog() throws IOException {
checkParametersForLogEncoding();
OutputStream outStream = new ByteArrayOutputStream();
this.printEncodedLogInStream(outStream);
outStream.flush();
outStream.close();
return outStream.toString();
}
/**
	 * Returns the string event log without commas and angle brackets ('<', '>').
*
* @return Event log as array of Strings
*/
public String[] getCleanStringsLog() {
String[] clean = new String[stringsLog.length];
for (int i = 0; i < stringsLog.length; i++) {
clean[i] = stringsLog[i].replace(",", "").replace("<", "").replace(">", "");
}
return clean;
}
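	/*
	 * For example (illustrative): a trace stored in stringsLog as "<a,b,c>" is returned by
	 * getCleanStringsLog() as "abc", since commas and angle brackets are stripped out.
	 */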
/**
* Prints the generated event log, {@link #log log}, in the specified output stream.
* @return The print-out of the event log
* @throws IOException
*/
private boolean printEncodedLogInStream(OutputStream outStream) throws IOException {
switch(this.parameters.outputEncoding) {
case xes:
new XesXmlSerializer().serialize(this.log, outStream);
break;
case mxml:
new XMxmlSerializer().serialize(this.log, outStream);
break;
case strings:
PrintWriter priWri = new PrintWriter(outStream);
for (String stringTrace : this.stringsLog) {
priWri.println(stringTrace);
// MessagePrinter.printlnOut(stringTrace); // Print in terminal the log. Temporarily disabled.
}
priWri.flush();
priWri.close();
break;
default:
outStream.flush();
outStream.close();
throw new UnsupportedOperationException("Support for this encoding is still work-in-progress");
}
return true;
}
/**
* Checks that {@link #parameters parameters} and {@link #log log} are in a
* correct state for generating the event log. In case the check fails, an
* exception is fired.
* @throws IllegalArgumentException
*/
private void checkParametersForLogEncoding() throws IllegalArgumentException {
if (this.log == null)
throw new IllegalStateException("Log not yet generated");
if (this.parameters.outputEncoding == null)
throw new IllegalStateException("Output encoding not specified in given parameters");
}
/**
* Creates an event for the event log
*/
private XEvent makeXEvent(XFactory xFactory, XConceptExtension concExtino,
XLifecycleExtension lifeExtension, XTimeExtension timeExtension,
TaskChar firedTransition, Date currentDate) {
XEvent xEvent = xFactory.createEvent();
concExtino.assignName(xEvent, firedTransition.toString());
lifeExtension.assignStandardTransition(xEvent, XLifecycleExtension.StandardModel.COMPLETE);
timeExtension.assignTimestamp(xEvent, currentDate);
return xEvent;
}
/**
* Generates a random date and time for a log event.
* @return A random date and time for the log event.
*/
private Date generateRandomDateTimeForLogEvent() {
return generateRandomDateTimeForLogEvent(null);
}
/**
* Generates a random date and time for a log event, no sooner than the
* provided parameter.
*
	 * @param laterThan The date and time that the generated timestamp must not precede
* @return A random date and time for the log event
*/
private Date generateRandomDateTimeForLogEvent(Date laterThan) {
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
if (laterThan == null) {
cal.add(GregorianCalendar.YEAR, -1);
cal.add(GregorianCalendar.MONTH, (int) ( Math.round(Math.random() * 12 )) * -1 );
cal.add(GregorianCalendar.WEEK_OF_MONTH, (int) ( Math.round(Math.random() * 4 )) * -1 );
cal.add(GregorianCalendar.DAY_OF_WEEK, (int) ( Math.round(Math.random() * 7 )) * -1 );
laterThan = cal.getTime();
}
long
randomAdditionalTime = (long) (Math.round(Math.random() * TimeUnit.DAYS.toMillis(1)));
cal.setTimeInMillis(laterThan.getTime() + randomAdditionalTime);
return cal.getTime();
}
} | 9,750 | 32.97561 | 125 | java |
Janus | Janus-master/src/minerful/logmaker/XesLogTracesSorter.java | package minerful.logmaker;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Comparator;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.deckfour.xes.extension.std.XConceptExtension;
import org.deckfour.xes.extension.std.XTimeExtension;
import org.deckfour.xes.in.XesXmlGZIPParser;
import org.deckfour.xes.in.XesXmlParser;
import org.deckfour.xes.model.XAttribute;
import org.deckfour.xes.model.XAttributeLiteral;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
import org.deckfour.xes.model.impl.XAttributeLiteralImpl;
import org.deckfour.xes.out.XesXmlSerializer;
import minerful.logmaker.params.SortingCriterion;
import minerful.utils.MessagePrinter;
public class XesLogTracesSorter implements Comparator<XTrace> {
public SortingCriterion[] sortingCriteria;
public XesLogTracesSorter(SortingCriterion... sortingCriteria) {
this.sortingCriteria = sortingCriteria;
}
@Override
public int compare(XTrace x1, XTrace x2) {
if (x1.equals(x2))
return 0;
if (x1.size() == 0 && x2.size() == 0)
return ( (x1.hashCode() < x2.hashCode()) ? -1 : 1 );
if (x1.size() == 0)
return 1;
if (x2.size() == 0)
return -1;
int result = 0;
for (int i = 0; i < sortingCriteria.length && result == 0; i++) {
SortingCriterion sortingCriterion = sortingCriteria[i];
switch (sortingCriterion) {
case FIRST_EVENT_ASC:
result = compareByFirstEventAsc(x1,x2);
break;
case LAST_EVENT_ASC:
result = compareByLastEventAsc(x1, x2);
break;
case TRACE_LENGTH_ASC:
result = compareByLengthAsc(x1, x2);
break;
case TRACE_LENGTH_DESC:
result = compareByLengthDesc(x1, x2);
break;
default:
throw new UnsupportedOperationException(String.format("Sorting criterion %s not yet implemented", sortingCriterion));
}
}
if ( result == 0 ) {
result = compareByHash(x1, x2);
}
return result;
}
public int compareByHash(XTrace x1, XTrace x2) {
int hashCode1 = x1.hashCode(), hashCode2 = x2.hashCode();
if (hashCode1 == hashCode2)
return 0;
return ( (hashCode1 < hashCode2) ? -1 : 1 );
}
public int compareByFirstEventAsc(XTrace x1, XTrace x2) {
XEvent
evt1 = x1.get(0),
evt2 = x2.get(0);
return compareEventsByTimestampAsc(evt1, evt2);
}
public int compareByLastEventAsc(XTrace x1, XTrace x2) {
XEvent
evt1 = x1.get(x1.size()-1),
evt2 = x2.get(x2.size()-1);
return compareEventsByTimestampAsc(evt1, evt2);
}
public int compareByLengthAsc(XTrace x1, XTrace x2) {
return Integer.compare(x1.size(), x2.size());
}
public int compareByLengthDesc(XTrace x1, XTrace x2) {
return compareByLengthAsc(x1,x2) * -1;
}
private int compareEventsByTimestampAsc(XEvent evt1, XEvent evt2) {
int result = 0;
XAttribute
ext1cmpVal = evt1.getAttributes().get(XTimeExtension.KEY_TIMESTAMP),
ext2cmpVal = evt2.getAttributes().get(XTimeExtension.KEY_TIMESTAMP);
result = ext1cmpVal.compareTo(ext2cmpVal);
return result;
}
public void renameEventLog(XLog evtLog) {
String nameSortingSuffix = String.format("Event log sorted by %s", MessagePrinter.printValues(sortingCriteria));
XAttribute logName = evtLog.getAttributes().get(XConceptExtension.KEY_NAME);
XAttributeLiteral logNameString = null;
if (logName != null) {
logNameString = (XAttributeLiteral)logName;
logNameString.setValue(logNameString.getValue() + " -- " + nameSortingSuffix);
}
else {
logNameString = new XAttributeLiteralImpl(
XConceptExtension.KEY_NAME,
nameSortingSuffix);
}
evtLog.getAttributes().put(XConceptExtension.KEY_NAME, logNameString);
System.out.println(evtLog.getAttributes().get(XConceptExtension.KEY_NAME));
}
public XLog sortXesLog(XLog xLog) {
SortedSet<XTrace> sorTraces = new TreeSet<XTrace>(this);
while (xLog.size() > 0) {
// Adding in the sorted set the next trace (removed from the original list)
sorTraces.add(xLog.remove(0));
}
// Adding traces again, according to the desired order
for (XTrace sorTrace : sorTraces) {
xLog.add(sorTrace);
}
return xLog;
}
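	/*
	 * Minimal usage sketch (illustrative only; `xLog` is assumed to be an already parsed XLog):
	 *
	 *   XesLogTracesSorter sorter = new XesLogTracesSorter(
	 *       SortingCriterion.TRACE_LENGTH_DESC, SortingCriterion.FIRST_EVENT_ASC);
	 *   XLog sortedLog = sorter.sortXesLog(xLog);
	 *
	 * Criteria are applied in the given order; traces still tied after all criteria have been
	 * applied are ordered by their hash codes (see compare above).
	 */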
/**
* For debugging purposes only
*/
public static void main(String[] args) throws Exception {
if (args.length < 2) {
System.err.println("Usage: java " + XesLogTracesSorter.class.getName() + " <xes-file-in> <xes-file-out> <sort-by>[;<sort-by>...]");
System.err.println("sort-by: either of " + MessagePrinter.printValues(SortingCriterion.values()));
System.exit(1);
}
File xesFileIn = new File(args[0]);
File xesFileOut = new File(args[1]);
// Setting up the right XesParser
XesXmlParser parser = new XesXmlParser();
if (!parser.canParse(xesFileIn)) {
parser = new XesXmlGZIPParser();
if (!parser.canParse(xesFileIn)) {
throw new IllegalArgumentException("Unparsable log file: " + xesFileIn.getAbsolutePath());
}
}
List<XLog> xLogs = parser.parse(xesFileIn);
		// Use the sorting criteria given as the third argument, if any; fall back to a default otherwise
		SortingCriterion[] criteria;
		if (args.length > 2) {
			String[] criteriaStrings = args[2].split(";");
			criteria = new SortingCriterion[criteriaStrings.length];
			for (int i = 0; i < criteriaStrings.length; i++) {
				criteria[i] = SortingCriterion.valueOf(criteriaStrings[i]);
			}
		} else {
			criteria = new SortingCriterion[]{
					SortingCriterion.LAST_EVENT_ASC, SortingCriterion.FIRST_EVENT_ASC
			};
		}
XesLogTracesSorter trSort = new XesLogTracesSorter(criteria);
XLog evtLog = trSort.sortXesLog(xLogs.get(0));
// Rename the event log
trSort.renameEventLog(evtLog);
new XesXmlSerializer().serialize(evtLog, new FileOutputStream(xesFileOut));
System.exit(0);
}
}
| 5,557 | 29.707182 | 134 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/AbstractErrorInjector.java | package minerful.logmaker.errorinjector;
/**
 * @(#)AbstractErrorInjector.java
*
*
* @author S. Simoncini, C. Di Ciccio
* @version 1.5 2012/8/28
*/
import org.apache.log4j.Logger;
abstract class AbstractErrorInjector implements ErrorInjector, IErrorInjector {
protected static Logger logger = Logger.getLogger(AbstractErrorInjector.class.getCanonicalName());
protected StringBuffer[] testBed;
protected double errorsInjectionPercentage;
protected Character targetChar;
protected Character[] alphabet;
AbstractErrorInjector(String[] testBedArray) {
this.setTestBed(testBedArray);
}
StringBuffer[] getTestBed() {
return testBed;
}
void setTestBed(String[] testBedArray) {
this.testBed = new StringBuffer[testBedArray.length];
for (int i = 0; i < testBedArray.length; i++) {
this.testBed[i] = new StringBuffer(testBedArray[i]);
}
}
@Override
public double getErrorsInjectionPercentage() {
return errorsInjectionPercentage;
}
@Override
public void setErrorsInjectionPercentage(double errorsInjectionPercentage) {
this.errorsInjectionPercentage = errorsInjectionPercentage;
}
@Override
public Character getTargetChar() {
return targetChar;
}
@Override
public void setTargetChar(Character targetChar) {
this.targetChar = targetChar;
}
@Override
public void unsetTargetChar(Character targetChar) {
this.targetChar = null;
}
@Override
public Character[] getAlphabet() {
return alphabet;
}
@Override
public void setAlphabet(Character[] alphabet) {
this.alphabet = alphabet;
}
@Override
public boolean isThereAnyTargetCharacter() {
return this.targetChar != null;
}
protected String[] testBedArray() {
String[] testBedArray = new String[this.testBed.length];
int i = 0;
for (StringBuffer sBuffer : this.testBed) {
testBedArray[i++] = sBuffer.toString();
}
return testBedArray;
}
} | 1,867 | 23.906667 | 99 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/AbstractErrorInjectorByMixImpl.java | package minerful.logmaker.errorinjector;
public abstract class AbstractErrorInjectorByMixImpl extends AbstractErrorInjector {
AbstractErrorInjectorByMixImpl(String[] testBedArray) {
super(testBedArray);
}
protected String[] applyErrorsInjectionPhase(ErrorInjector errorInjector, double percentage) {
errorInjector.setAlphabet(alphabet);
errorInjector.setErrorsInjectionPercentage(percentage);
if (this.isThereAnyTargetCharacter())
errorInjector.setTargetChar(targetChar);
return errorInjector.injectErrors();
}
} | 543 | 31 | 95 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/AbstractErrorInjectorImpl.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public abstract class AbstractErrorInjectorImpl extends AbstractErrorInjector {
abstract List<TargetDataStructure> executeErrorInjection(
double errorInjectionTargetProportionalIndex,
char injectableChar,
List<TargetDataStructure> targets);
protected abstract List<List<TestBedCandidate>> decideErrorInjectionPoints();
abstract List<List<TargetDataStructure>> prepareTargets();
AbstractErrorInjectorImpl(String[] testBedArray) {
super(testBedArray);
}
/**
* Generates a random number, e.g., used to decide where to make a
	 * modification in the string. The returned number can range from 0 up to
	 * the given upper bound (both included), as it is obtained by rounding the
	 * product of Math.random() and the upper bound.
	 *
	 * @param upperBound
	 *            The upper bound for the random number. It must be greater
	 *            than or equal to 0.
* @return The random number.
*/
protected int decideBoundedRandom(int upperBound) {
if (upperBound < 0)
throw new IllegalArgumentException(
"Invalid upper bound: " + upperBound);
int pos = this.applyAndRound(Math.random(), upperBound);
return pos;
}
protected int applyAndRound(double value, int number) {
return (int)StrictMath.round(value * number);
}
protected char decideRandomChar() {
return this.alphabet[this.decideBoundedRandom(this.alphabet.length-1)];
}
protected List<Integer> findOccurrences(int indexOfTheStringToScan, char targetCharacter) {
int k = this.testBed[indexOfTheStringToScan].indexOf(
String.valueOf(targetCharacter)
);
logger.trace("Searching occurrences of "
+ targetCharacter
+ " into "
+ this.testBed[indexOfTheStringToScan]
+ "... ");
List<Integer> occurrences = new ArrayList<Integer>();
while (k > -1) {
occurrences.add(k);
k = this.testBed[indexOfTheStringToScan].indexOf(String.valueOf(targetCharacter), k+1);
}
logger.trace(occurrences.size());
return occurrences;
}
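	/*
	 * For example (illustrative): scanning the string "abcabca" for the target character 'a'
	 * yields the occurrence indexes [0, 3, 6].
	 */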
protected int countOccurrences(int indexOfTheStringToScan, char targetCharacter) {
int occurrences = 0,
k = this.testBed[indexOfTheStringToScan].indexOf(
String.valueOf(targetCharacter)
);
logger.trace("Counting occurrences of "
+ targetCharacter
+ " into "
+ this.testBed[indexOfTheStringToScan]
+ "... ");
while (k > -1) {
occurrences++;
k = this.testBed[indexOfTheStringToScan].indexOf(String.valueOf(targetCharacter), k+1);
}
logger.trace(occurrences);
return occurrences;
}
protected int countOccurrences(char targetCharacter) {
int occurrences = 0;
for (int i = 0; i < this.testBed.length; i++) {
occurrences += this.countOccurrences(i, targetCharacter);
}
return occurrences;
}
protected int countOccurrences() {
int amount = 0;
for (int i = 0; i < this.testBed.length; i++) {
amount += this.testBed[i].length();
}
return amount;
}
protected int applyErrorInjectionPercentage(int number) {
double rawPercentageApplication = number * this.errorsInjectionPercentage / 100.0;
if ( StrictMath.ceil(rawPercentageApplication)
!=
StrictMath.floor(rawPercentageApplication)
) {
boolean preferFloorValue = ( this.applyAndRound(Math.random(), 1) == 1);
return (int)(
preferFloorValue ?
StrictMath.floor(rawPercentageApplication) :
StrictMath.ceil(rawPercentageApplication)
);
}
return (int)StrictMath.round(rawPercentageApplication);
}
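	/**
	 * Template method tying the error injection together: the injection points are decided by
	 * {@link #decideErrorInjectionPoints()}, the candidate positions in the test bed are
	 * collected by {@link #prepareTargets()}, and the actual insertions/deletions are then
	 * applied via {@code executeErrorInjection(...)}.
	 */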
@Override
public String[] injectErrors() {
return this.executeErrorInjection(
this.decideErrorInjectionPoints(),
this.prepareTargets());
}
protected String[] executeErrorInjection(
List<List<TestBedCandidate>> errorInjectionPoints,
List<List<TargetDataStructure>> targets) {
logger.trace("errorInjectionPoints.size() = " + errorInjectionPoints.size());
logger.trace("targets.size() = " + targets.size());
if (errorInjectionPoints.size() != targets.size())
throw new IllegalArgumentException("Error injection points and targets are not sized the same! " +
"They must be long the same.");
Iterator<List<TestBedCandidate>> errorInjIterator = errorInjectionPoints.iterator();
Iterator<List<TargetDataStructure>> targetIterator = targets.iterator();
List<TestBedCandidate> errorInjectionPointsInString = null;
List<TargetDataStructure> targetsInString = null;
// If there is a target character, you have to insert it, and only it.
Character injectableChar = (
isThereAnyTargetCharacter() ?
this.getTargetChar() :
null);
while (errorInjIterator.hasNext()) {
errorInjectionPointsInString = errorInjIterator.next();
targetsInString = targetIterator.next();
// Apply the error
for (TestBedCandidate errorInjection : errorInjectionPointsInString) {
// If there is not a target character, you have to decide one at each loop.
if (!isThereAnyTargetCharacter()) {
injectableChar = this.decideRandomChar();
}
// Now, you can insert the error.
logger.trace("Length of target indexes, before: " + targetsInString.size());
//targetsInString = this.executeErrorInjection(injectedIndex, injectableChar, targetsInString);
this.executeErrorInjection(errorInjection.candidateProportionalIndex, injectableChar, targetsInString);
logger.trace("Length of target indexes, after: " + targetsInString.size());
}
}
return this.testBedArray();
}
} | 5,546 | 32.215569 | 107 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/AbstractErrorInjectorOverCollection.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.List;
abstract class AbstractErrorInjectorOverCollection extends AbstractErrorInjectorImpl {
public AbstractErrorInjectorOverCollection(String[] testBedArray) {
super(testBedArray);
}
@Override
protected List<List<TestBedCandidate>> decideErrorInjectionPoints() {
int numOfErrors = 0,
upperBound = 0;
List<List<TestBedCandidate>> errorInjectionPointsCollector = new ArrayList<List<TestBedCandidate>>(1);
List<TestBedCandidate> errorInjectionPoints = new ArrayList<TestBedCandidate>();
// If there was a target character to insert/remove…
if (this.isThereAnyTargetCharacter()) {
// … the upper bound for the errors you can inject is given by the number of occurrences of it in the whole collection
upperBound = this.countOccurrences(this.targetChar);
} else {
// … otherwise, the upper bound is the number of the characters appearing in the collection
upperBound = this.countOccurrences();
}
// Decide the number of errors to inject
numOfErrors = this.applyErrorInjectionPercentage(upperBound);
// Until you have not counted all of the errors you decided to inject…
while (numOfErrors-- > 0) {
// … keep on putting entries in the error injection data structure
errorInjectionPoints.add(
new TestBedCandidate(
Math.random()
)
);
}
errorInjectionPointsCollector.add(errorInjectionPoints);
return errorInjectionPointsCollector;
}
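	/*
	 * For example (illustrative): with an error injection percentage of 10, no target character
	 * set, and 200 characters in the whole collection, 20 injection points are generated, each
	 * carrying a random proportional position between 0 and 1.
	 */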
} | 1,499 | 34.714286 | 121 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/AbstractErrorInjectorOverStrings.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.List;
public abstract class AbstractErrorInjectorOverStrings extends AbstractErrorInjectorImpl {
public AbstractErrorInjectorOverStrings(String[] testBedArray) {
super(testBedArray);
}
@Override
protected List<List<TestBedCandidate>> decideErrorInjectionPoints() {
int numOfErrors = 0,
upperBound = 0;
List<List<TestBedCandidate>> errorInjectionPointsCollector = new ArrayList<List<TestBedCandidate>>(
this.testBed.length);
List<TestBedCandidate> errorInjectionPoints = null;
logger.trace("Error injection points are being decided...");
// For each string in the testbed
for (int i = 0; i < this.testBed.length; i++) {
errorInjectionPoints = new ArrayList<TestBedCandidate>();
// If there was a target character to insert/remove…
if (this.isThereAnyTargetCharacter()) {
// … the upper bound for the errors you can inject is given by the number of occurrences of it in the current string
upperBound = this.countOccurrences(i, this.targetChar);
} else {
// … otherwise, the upper bound is the length of the string itself
upperBound = this.testBed[i].length();
}
// Decide the number of errors to inject
numOfErrors = this.applyErrorInjectionPercentage(upperBound);
if (this.isThereAnyTargetCharacter())
logger.trace(numOfErrors + " errors are being injected in string " + this.testBed[i] + ", which has " + upperBound + " " + this.targetChar + "'s in.");
else
logger.trace(numOfErrors + " errors are being injected in string " + this.testBed[i] + ", which is " + this.testBed[i].length() + " chr's long.");
// Until you have not counted all of the errors you decided to inject…
while (numOfErrors-- > 0) {
// … keep on putting entries in the error injection data structure
errorInjectionPoints.add(
new TestBedCandidate(
Math.random()
)
);
}
errorInjectionPointsCollector.add(errorInjectionPoints);
}
logger.trace("Error injection points have been decided.");
return errorInjectionPointsCollector;
}
} | 2,154 | 35.525424 | 155 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjector.java | package minerful.logmaker.errorinjector;
public interface ErrorInjector {
enum SpreadingPolicy {
/**
* Inject the errors calculating percentages over each single string. I.e.,
* if the percentage is equal to 10% then 1 character over 10 FOR EACH
* STRING will be affected by the error.
* It can be a valid value for the "-eS" parameter.
*/
string,
/**
* Inject the errors calculating percentages over the whole collection of
* strings. I.e., if the percentage is equal to 10% then 1 character over 10
* OVER THE WHOLE COLLECTION will be affected by the error.
* It can be a valid value for the "-eS" parameter.
* It is the DEFAULT value for the "-eS" parameter.
*/
collection;
public static SpreadingPolicy getDefault() { return collection; }
}
enum ErrorType {
/**
* Errors are insertions of spurious characters.
* It can be a valid value for the "-eT" parameter.
*/
ins,
/**
* Errors are deletions of characters.
* It can be a valid value for the "-eT" parameter.
*/
del,
/**
* Errors are either insertions or deletions of characters, as based on a random decision.
* It can be a valid value for the "-eT" parameter.
* It is the DEFAULT value for the "-eS" parameter.
*/
insdel;
public static ErrorType getDefault() { return insdel; }
}
String[] injectErrors();
double getErrorsInjectionPercentage();
void setErrorsInjectionPercentage(double errorsInjectionPercentage);
Character getTargetChar();
void setTargetChar(Character targetChar);
void unsetTargetChar(Character targetChar);
Character[] getAlphabet();
void setAlphabet(Character[] alphabet);
boolean isThereAnyTargetCharacter();
} | 1,709 | 26.580645 | 92 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorFactory.java | package minerful.logmaker.errorinjector;
import org.apache.log4j.Logger;
import minerful.logmaker.errorinjector.ErrorInjector.ErrorType;
import minerful.logmaker.errorinjector.ErrorInjector.SpreadingPolicy;
public class ErrorInjectorFactory {
private static Logger logger = Logger.getLogger(ErrorInjectorFactory.class.getCanonicalName());
public ErrorInjector createErrorInjector(SpreadingPolicy policy, ErrorType type, String[] testBedArray) {
logger.trace("\"" + type + "-over-" + policy + "\" error injection requested, on a " + testBedArray.length + " strings long collection");
switch (policy) {
case string:
switch (type) {
case ins:
return new ErrorInjectorOverStringsByInsertion(testBedArray);
case del:
return new ErrorInjectorOverStringsByDeletion(testBedArray);
case insdel:
return new ErrorInjectorOverStringsByMixInsDel(testBedArray);
default:
break;
}
case collection:
switch (type) {
case ins:
return new ErrorInjectorOverCollectionByInsertion(testBedArray);
case del:
return new ErrorInjectorOverCollectionByDeletion(testBedArray);
case insdel:
return new ErrorInjectorOverCollectionByMixInsDel(testBedArray);
default:
break;
}
default:
break;
}
throw new UnsupportedOperationException("The \"" + type + "-over-" + policy + "\" error injection is not provided, yet");
}
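	/*
	 * Minimal usage sketch (illustrative only; `traces` and `alphabet` are assumed to be
	 * readily available):
	 *
	 *   ErrorInjector injector = new ErrorInjectorFactory().createErrorInjector(
	 *       ErrorInjector.SpreadingPolicy.collection, ErrorInjector.ErrorType.insdel, traces);
	 *   injector.setAlphabet(alphabet);
	 *   injector.setErrorsInjectionPercentage(10.0);
	 *   String[] noisyTraces = injector.injectErrors();
	 */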
} | 1,386 | 31.255814 | 139 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorOverCollectionByDeletion.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.List;
public class ErrorInjectorOverCollectionByDeletion extends AbstractErrorInjectorOverCollection {
public ErrorInjectorOverCollectionByDeletion(String[] testBedArray) {
super(testBedArray);
}
@Override
protected List<List<TargetDataStructure>> prepareTargets() {
List<List<TargetDataStructure>> targets = new ArrayList<List<TargetDataStructure>>(1);
List<TargetDataStructure> targetsInString = null;
if (!isThereAnyTargetCharacter()) {
targetsInString = new ArrayList<TargetDataStructure>(super.countOccurrences());
} else {
targetsInString = new ArrayList<TargetDataStructure>();
}
logger.trace("Targets for error injection are being prepared...");
int stringsCounter = 0;
if (isThereAnyTargetCharacter()) {
for (; stringsCounter < testBed.length; stringsCounter++) {
// Beware: you can not remove a character which is not already in the string!
List<Integer> occurrences = super.findOccurrences(
stringsCounter,
this.targetChar);
if (occurrences.size() > 1) {
for (Integer occurrence : occurrences) {
targetsInString.add(
new TargetDataStructure(
stringsCounter,
occurrence
)
);
}
}
}
}
else {
for (StringBuffer testString : testBed) {
for (int charCounter = 0; charCounter < testString.length() -1; charCounter++) {
targetsInString.add(
new TargetDataStructure(
stringsCounter,
charCounter
)
);
}
stringsCounter++;
}
}
targets.add(targetsInString);
logger.trace("Targets for error injection are ready.");
return targets;
}
@Override
protected List<TargetDataStructure> executeErrorInjection(
double errorInjectionTargetProportionalIndex,
char injectableChar, List<TargetDataStructure> targets) {
if (targets.size() == 0) {
logger.trace("No " + injectableChar + " character to delete");
return targets;
}
int injectedIndex = this.applyAndRound(
errorInjectionTargetProportionalIndex,
targets.size() -1
),
injectedStringNumber = targets.get(injectedIndex).stringNumber;
logger.trace("Error injection: deleting " + injectableChar + " in position " + targets.get(injectedIndex).index + " of " + this.testBed[injectedStringNumber]);
this.testBed[injectedStringNumber].deleteCharAt(
targets.get(injectedIndex).index
);
// Beware: you just removed a char from the string!
targets.remove(injectedIndex);
// All the following characters in the same string must turn their index reduced by 1
for ( int i = injectedIndex;
i < targets.size()
&& injectedStringNumber == targets.get(i).stringNumber;
i++) {
logger.trace("Moving index " + targets.get(i).index + " of string \"" + this.testBed[targets.get(i).stringNumber] + "\" to " + (targets.get(i).index - 1));
targets.get(i).index--;
}
return targets;
}
} | 2,996 | 29.272727 | 161 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorOverCollectionByInsertion.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.List;
public class ErrorInjectorOverCollectionByInsertion extends AbstractErrorInjectorOverCollection {
public ErrorInjectorOverCollectionByInsertion(String[] testBedArray) {
super(testBedArray);
}
@Override
protected List<List<TargetDataStructure>> prepareTargets() {
List<List<TargetDataStructure>> targets = new ArrayList<List<TargetDataStructure>>(1);
List<TargetDataStructure> targetsInString = null;
if (!isThereAnyTargetCharacter()) {
targetsInString = new ArrayList<TargetDataStructure>(super.countOccurrences() + testBed.length);
} else {
targetsInString = new ArrayList<TargetDataStructure>();
}
logger.trace("Targets for error injection are being prepared...");
int stringsCounter = 0;
for (StringBuffer testString : testBed) {
			// Unlike the over-string policy, the over-collection insertion must be able to insert characters into empty strings as well.
for (int charCounter = 0; charCounter <= testString.length(); charCounter++) {
targetsInString.add(
new TargetDataStructure(
stringsCounter,
charCounter
)
);
}
stringsCounter++;
}
targets.add(targetsInString);
logger.trace("Targets for error injection are ready.");
return targets;
}
@Override
protected List<TargetDataStructure> executeErrorInjection(
double errorInjectionTargetProportionalIndex,
char injectableChar, List<TargetDataStructure> targets) {
int injectedIndex = this.applyAndRound(
errorInjectionTargetProportionalIndex,
targets.size() -1
),
				injectedStringNumber = targets.get(injectedIndex).stringNumber;
logger.trace("Error injection: inserting " + injectableChar + " in position " + targets.get(injectedIndex).index + " of " + this.testBed[injectedStringNumber]);
this.testBed[targets.get(injectedIndex).stringNumber].insert(
targets.get(injectedIndex).index,
injectableChar
);
// Beware: you just inserted a char into the string: thus, the range of possible insertions raises, by 1, in the same string.
targets.add(
injectedIndex,
new TargetDataStructure(
injectedStringNumber,
targets.get(injectedIndex).index
)
);
for ( int i = injectedIndex+1;
i < targets.size()
&& targets.get(i).stringNumber == injectedStringNumber;
i++) {
logger.trace("Moving index " + targets.get(i).index + " of string \"" + this.testBed[targets.get(i).stringNumber] + "\" to " + (targets.get(i).index + 1));
targets.get(i).index++;
}
return targets;
}
}
| 2,623 | 31.8 | 162 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorOverCollectionByMixInsDel.java | package minerful.logmaker.errorinjector;
public class ErrorInjectorOverCollectionByMixInsDel extends AbstractErrorInjectorByMixImpl {
public ErrorInjectorOverCollectionByMixInsDel(String[] testBedArray) {
super(testBedArray);
}
@Override
public String[] injectErrors() {
String[] alteredTestBedArray = null;
double
insertionErrorsInjectionPercentage =
Math.random() * errorsInjectionPercentage,
deletionErrorsInjectionPercentage =
errorsInjectionPercentage - insertionErrorsInjectionPercentage;
		// Phase 1: apply insertion errors over the collection
AbstractErrorInjectorOverCollection errorInjex =
new ErrorInjectorOverCollectionByInsertion(
super.testBedArray());
alteredTestBedArray =
this.applyErrorsInjectionPhase(
errorInjex,
						insertionErrorsInjectionPercentage);
		// Phase 2: apply deletion errors over the collection
errorInjex =
new ErrorInjectorOverCollectionByDeletion(
alteredTestBedArray);
alteredTestBedArray =
this.applyErrorsInjectionPhase(
errorInjex,
deletionErrorsInjectionPercentage);
return alteredTestBedArray;
}
} | 1,142 | 29.078947 | 92 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorOverStringsByDeletion.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.List;
public class ErrorInjectorOverStringsByDeletion extends AbstractErrorInjectorOverStrings {
public ErrorInjectorOverStringsByDeletion(String[] testBedArray) {
super(testBedArray);
}
@Override
protected List<List<TargetDataStructure>> prepareTargets() {
List<List<TargetDataStructure>> targets = new ArrayList<List<TargetDataStructure>>(this.testBed.length);
List<TargetDataStructure> targetsInString = null;
logger.trace("Targets for error injection are being prepared...");
int stringsCounter = 0;
if (isThereAnyTargetCharacter()) {
for (; stringsCounter < testBed.length; stringsCounter++) {
targetsInString = new ArrayList<IErrorInjector.TargetDataStructure>();
// Beware: you can not remove a character which is not already in the string!
List<Integer> occurrences = super.findOccurrences(
stringsCounter,
this.targetChar);
if (occurrences.size() > 0) {
for (Integer occurrence : occurrences) {
logger.trace("Adding occurrence " + occurrence + " in string " + this.testBed[stringsCounter]);
targetsInString.add(
new TargetDataStructure(
stringsCounter,
occurrence
)
);
}
}
targets.add(targetsInString);
}
}
else {
for (StringBuffer testString : testBed) {
targetsInString = new ArrayList<IErrorInjector.TargetDataStructure>();
for (int charCounter = 0; charCounter < testString.length() -1; charCounter++) {
targetsInString.add(
new TargetDataStructure(
stringsCounter,
charCounter
)
);
}
stringsCounter++;
targets.add(targetsInString);
}
}
logger.trace("Targets for error injection are ready.");
return targets;
}
@Override
protected List<TargetDataStructure> executeErrorInjection(
double errorInjectionTargetProportionalIndex,
char injectableChar, List<TargetDataStructure> targetsInString) {
if (targetsInString.size() == 0) {
logger.trace("No " + injectableChar + " character to delete");
return targetsInString;
}
int injectedIndex = this.applyAndRound(
errorInjectionTargetProportionalIndex,
targetsInString.size() -1
);
logger.trace("Error injection: deleting " + injectableChar + " in position " + targetsInString.get(injectedIndex).index + " of " + this.testBed[targetsInString.get(injectedIndex).stringNumber]);
this.testBed[targetsInString.get(injectedIndex).stringNumber].deleteCharAt(
targetsInString.get(injectedIndex).index
);
// Beware: you just removed a char from the string!
targetsInString.remove(injectedIndex);
// All the following characters in the same string must turn their index reduced by 1
for ( int i = injectedIndex;
i < targetsInString.size();
i++) {
targetsInString.get(i).index--;
}
return targetsInString;
}
}
| 2,959 | 29.833333 | 196 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorOverStringsByInsertion.java | package minerful.logmaker.errorinjector;
import java.util.ArrayList;
import java.util.List;
public class ErrorInjectorOverStringsByInsertion extends AbstractErrorInjectorOverStrings {
public ErrorInjectorOverStringsByInsertion(String[] testBedArray) {
super(testBedArray);
}
@Override
List<List<TargetDataStructure>> prepareTargets() {
List<List<TargetDataStructure>> targets = new ArrayList<List<TargetDataStructure>>(this.testBed.length);
List<TargetDataStructure> targetsInString = null;
logger.trace("Targets for error injection are being prepared...");
int stringsCounter = 0;
for (StringBuffer testString : testBed) {
targetsInString = new ArrayList<IErrorInjector.TargetDataStructure>();
for (int charCounter = 0; charCounter < testString.length(); charCounter++) {
targetsInString.add(
new TargetDataStructure(
stringsCounter,
charCounter
)
);
}
stringsCounter++;
targets.add(targetsInString);
}
logger.trace("Targets for error injection are ready.");
return targets;
}
@Override
List<TargetDataStructure> executeErrorInjection(
double errorInjectionTargetProportionalIndex,
char injectableChar, List<TargetDataStructure> targetsInString) {
int injectedIndex = this.applyAndRound(
errorInjectionTargetProportionalIndex,
targetsInString.size() -1
);
logger.trace(
"Error injection: inserting "
+ injectableChar
+ " in position "
+ targetsInString.get(injectedIndex).index
+ " of "
+ this.testBed[targetsInString.get(injectedIndex).stringNumber]);
this.testBed[targetsInString.get(injectedIndex).stringNumber].insert(
targetsInString.get(injectedIndex).index,
injectableChar
);
// Beware: you just inserted a char into the string: thus, the range of possible insertions raises, by 1.
logger.trace("Adding the new \"last\" position (" + (targetsInString.size()+1) + ") in " + this.testBed[targetsInString.get(injectedIndex).stringNumber]);
targetsInString.add(
new TargetDataStructure(
targetsInString.get(injectedIndex).stringNumber,
targetsInString.size()+1
)
);
return targetsInString;
}
} | 2,198 | 30.414286 | 156 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/ErrorInjectorOverStringsByMixInsDel.java | package minerful.logmaker.errorinjector;
public class ErrorInjectorOverStringsByMixInsDel extends AbstractErrorInjectorByMixImpl {
public ErrorInjectorOverStringsByMixInsDel(String[] testBedArray) {
super(testBedArray);
}
@Override
public String[] injectErrors() {
String[] alteredTestBedArray = null;
double
insertionErrorsInjectionPercentage =
Math.random() * errorsInjectionPercentage,
deletionErrorsInjectionPercentage =
errorsInjectionPercentage - insertionErrorsInjectionPercentage;
// Phase 1: apply insertion errors over strings
AbstractErrorInjectorOverStrings errorInjex =
new ErrorInjectorOverStringsByInsertion(
super.testBedArray());
alteredTestBedArray =
this.applyErrorsInjectionPhase(
errorInjex,
						insertionErrorsInjectionPercentage);
// Phase 2: apply deletion errors over strings
errorInjex =
new ErrorInjectorOverStringsByDeletion(
alteredTestBedArray);
alteredTestBedArray =
this.applyErrorsInjectionPhase(
errorInjex,
deletionErrorsInjectionPercentage);
return alteredTestBedArray;
}
} | 1,127 | 28.684211 | 89 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/IErrorInjector.java | package minerful.logmaker.errorinjector;
interface IErrorInjector {
class TargetDataStructure {
public final int stringNumber;
public int index;
public TargetDataStructure(int stringNumber, int index) {
this.stringNumber = stringNumber;
this.index = index;
}
}
class TestBedCandidate {
public final double candidateProportionalIndex;
public TestBedCandidate(double candidateProportionalIndex) {
this.candidateProportionalIndex = candidateProportionalIndex;
}
}
} | 499 | 21.727273 | 64 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/OldErrorInjector.java | package minerful.logmaker.errorinjector;
/**
* @(#)ErrorInjector.java
*
*
* @author S. Simoncini, C. Di Ciccio
* @version 1.5 2012/8/28
*/
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.log4j.Logger;
@Deprecated
public class OldErrorInjector {
public enum SpreadingPolicy {
/**
* Inject the errors calculating percentages over each single string. I.e.,
* if the percentage is equal to 10% then 1 character over 10 FOR EACH
* STRING will be affected by the error.
* It can be a valid value for the "-eS" parameter.
*/
string,
/**
* Inject the errors calculating percentages over the whole collection of
* strings. I.e., if the percentage is equal to 10% then 1 character over 10
* OVER THE WHOLE COLLECTION will be affected by the error.
* It can be a valid value for the "-eS" parameter.
* It is the DEFAULT value for the "-eS" parameter.
*/
collection;
public static SpreadingPolicy getDefault() { return collection; }
}
public enum ErrorType {
/**
* Errors are insertions of spurious characters.
* It can be a valid value for the "-eT" parameter.
*/
ins,
/**
* Errors are deletions of characters.
* It can be a valid value for the "-eT" parameter.
*/
del,
/**
* Errors are either insertions or deletions of characters, as based on a random decision.
* It can be a valid value for the "-eT" parameter.
* It is the DEFAULT value for the "-eS" parameter.
*/
insdel;
public static ErrorType getDefault() { return insdel; }
}
private static Logger logger = Logger.getLogger(OldErrorInjector.class.getCanonicalName());
private String[] testBedArray;
private SpreadingPolicy errorInjectionSpreadingPolicy;
private ErrorType errorType;
private double errorsInjectionPercentage;
private Character targetChar;
private Character[] alphabet;
private int totalChrs = 0;
public OldErrorInjector(String[] testBedArray) {
this.setTestBedArray(testBedArray);
}
private void updateCharsTotalInTestBed() {
this.totalChrs = 0;
for (String testString: testBedArray) {
this.totalChrs += testString.length();
}
}
public String[] getTestBedArray() {
return testBedArray;
}
public void setTestBedArray(String[] testBedArray) {
this.testBedArray = testBedArray;
this.updateCharsTotalInTestBed();
}
public SpreadingPolicy getErrorInjectionSpreadingPolicy() {
return errorInjectionSpreadingPolicy;
}
public void setErrorInjectionSpreadingPolicy(SpreadingPolicy errorInjectionSpreadingPolicy) {
this.errorInjectionSpreadingPolicy = errorInjectionSpreadingPolicy;
}
public ErrorType getErrorType() {
return errorType;
}
public void setErrorType(ErrorType errorType) {
this.errorType = errorType;
}
public double getErrorsInjectionPercentage() {
return errorsInjectionPercentage;
}
public void setErrorsInjectionPercentage(double errorsInjectionPercentage) {
this.errorsInjectionPercentage = errorsInjectionPercentage;
}
public Character getTargetChar() {
return targetChar;
}
public void setTargetChar(Character targetChar) {
this.targetChar = targetChar;
}
public Character[] getAlphabet() {
return alphabet;
}
public void setAlphabet(Character[] alphabet) {
this.alphabet = alphabet;
}
public int getTotalChrs() {
return totalChrs;
}
public boolean isThereAnyTargetCharacter() {
return this.targetChar != null;
}
/**
* Generates a random number, e.g., used to decide where to make a
	 * modification in the string. The random number can range from 0 up to the
	 * given upper bound (excluded).
*
* @param upperBound
* The upper bound for the random number. It must be greater
* than 0.
* @return The random number.
*/
private static int generateBoundedRandom(int upperBound) {
if (upperBound <= 0)
throw new IllegalArgumentException(
"Invalid upper bound: " + upperBound);
double x = Math.random() * upperBound;
int pos = (int)x;
return pos;
}
private static ArrayList<Integer> findTargetIndexes(String stringToScan, char targetCharacter) {
ArrayList<Integer> targetIndexes = new ArrayList<Integer>();
int k = stringToScan.indexOf(targetCharacter);
while (k > -1) {
targetIndexes.add(k);
			// Resume the search right after the last match; otherwise the loop would never advance
			k = stringToScan.indexOf(targetCharacter, k + 1);
}
return targetIndexes;
}
public String[] injectErrors() {
switch (errorInjectionSpreadingPolicy) {
case string :
switch (errorType) {
case del:
if (isThereAnyTargetCharacter()) {
deleteGivenCharacterPerString(targetChar);
} else {
deleteRandomCharactersPerString();
}
break;
case ins:
if (isThereAnyTargetCharacter()) {
insertGivenCharacterPerString(targetChar);
} else {
insertRandomCharactersPerString();
}
break;
case insdel:
if (isThereAnyTargetCharacter()) {
insertOrDeleteGivenCharacterPerString(targetChar);
} else {
insertOrDeleteRandomCharactersPerString();
}
break;
}
break;
case collection:
switch (errorType) {
case del:
if (isThereAnyTargetCharacter()) {
deleteGivenCharacterOverCollection(targetChar);
} else {
deleteRandomCharactersOverCollection();
}
break;
case ins:
if (isThereAnyTargetCharacter()) {
insertGivenCharacterOverCollection(targetChar);
} else {
insertRandomCharactersOverCollection();
}
break;
case insdel:
if (isThereAnyTargetCharacter()) {
insertOrDeleteGivenCharacterOverCollection(targetChar);
} else {
insertOrDeleteRandomCharactersOverCollection();
}
break;
}
break;
}
return this.testBedArray;
}
private char randomCharacter() {
return alphabet[generateBoundedRandom(alphabet.length)];
}
private void deleteRandomCharactersPerString() {
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
int wordLength = testBedArray[stringIndex].length();
int howManyCharsToDelete =
(int)(wordLength * errorsInjectionPercentage / 100.0);
deleteRandomCharactersInString(howManyCharsToDelete, stringIndex);
}
}
private void insertRandomCharactersPerString() {
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
int wordLength = testBedArray[stringIndex].length();
int howManyCharsToInsert =
(int)(wordLength * errorsInjectionPercentage / 100.0);
insertRandomCharactersInString(howManyCharsToInsert, stringIndex);
}
}
private void insertOrDeleteRandomCharactersPerString() {
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
int wordLength = testBedArray[stringIndex].length();
int numberOfErrors = (int)(wordLength * errorsInjectionPercentage/100.0);
int numberOfDeletions = (int)(Math.random() * numberOfErrors);
int numberOfInsertions = numberOfErrors - numberOfDeletions;
deleteRandomCharactersInString(numberOfDeletions, stringIndex);
insertRandomCharactersInString(numberOfInsertions, stringIndex);
}
}
private void deleteRandomCharactersInString(int numberOfDeletions, int stringIndex) {
int wordLength = testBedArray[stringIndex].length();
if (numberOfDeletions <= wordLength) {
for (int j=0; j<numberOfDeletions; j++) {
int position = generateBoundedRandom(wordLength);
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.deleteCharAt(position)
.toString();
wordLength--;
}
}
}
private void insertRandomCharactersInString(int numberOfInsertions, int stringIndex) {
int wordLength = testBedArray[stringIndex].length();
for (int j=0; j<numberOfInsertions; j++) {
int position = generateBoundedRandom(wordLength);
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.insert(position, randomCharacter())
.toString();
wordLength++;
}
}
private void deleteRandomCharactersOverCollection() {
int numberOfErrors = (int)(totalChrs * errorsInjectionPercentage/100.0);
deleteRandomCharactersOverCollection(numberOfErrors);
}
private void insertRandomCharactersOverCollection() {
int numberOfErrors = (int)(totalChrs * errorsInjectionPercentage/100.0);
insertRandomCharactersOverCollection(numberOfErrors);
}
private void insertOrDeleteRandomCharactersOverCollection() {
int numberOfErrors = (int)(totalChrs * errorsInjectionPercentage/100.0);
int numberOfDeletions = (int)(Math.random() * numberOfErrors);
int numberOfInsertions = numberOfErrors - numberOfDeletions;
deleteRandomCharactersOverCollection(numberOfDeletions);
insertRandomCharactersOverCollection(numberOfInsertions);
}
private void deleteRandomCharactersOverCollection(int numberOfDeletions) {
for (int j=0; j<numberOfDeletions; j++) {
int position = generateBoundedRandom(totalChrs);
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
if (testBedArray[stringIndex].length() <= position)
position -= testBedArray[stringIndex].length();
else {
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.deleteCharAt(position)
.toString();
break;
}
}
totalChrs--;
}
}
private void insertRandomCharactersOverCollection(int numberOfInsertions) {
for (int j=0; j<numberOfInsertions; j++) {
int position = generateBoundedRandom(totalChrs);
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
if (testBedArray[stringIndex].length() <= position)
position-=testBedArray[stringIndex].length();
else {
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.insert(position, randomCharacter())
.toString();
break;
}
}
totalChrs++;
}
}
private void deleteGivenCharacterPerString(char targetCharacter) {
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
String stringToScan = testBedArray[stringIndex];
ArrayList<Integer> targetIndexes = findTargetIndexes(stringToScan, targetCharacter);
int numberOfChars =
generateBoundedRandom(
(int)(targetIndexes.size() * errorsInjectionPercentage / 100.0));
int[] deletionPositions = new int[numberOfChars];
int w=0;
for (int j=0; j<numberOfChars; j++) {
int position = generateBoundedRandom(targetIndexes.size());
deletionPositions[w]=targetIndexes.get(position);
w++;
targetIndexes.remove(position);
targetIndexes.trimToSize();
}
Arrays.sort(deletionPositions);
for (int j=deletionPositions.length-1; j>=0; j--) {
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.deleteCharAt(deletionPositions[j])
.toString();
}
}
}
private void deleteGivenCharacterOverCollection(char targetCharacter) {
String stringToScan="";
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
stringToScan = stringToScan.concat(testBedArray[stringIndex]);
}
ArrayList<Integer> targetIndexes = findTargetIndexes(stringToScan, targetCharacter);
if (targetIndexes.size()>=1) {
int numberOfChars =
generateBoundedRandom(
(int)(targetIndexes.size() * errorsInjectionPercentage / 100.0));
deleteFromTargetIndexes(targetIndexes, numberOfChars);
}
else
logger.error("The given character is not in the testbed");
}
private void deleteFromTargetIndexes(ArrayList<Integer> targetIndexes, int numberOfChars) {
int[] deletionPositions = new int[numberOfChars];
int w=0;
for (int j=0; j<numberOfChars; j++) {
int position = generateBoundedRandom(targetIndexes.size());
deletionPositions[w]=targetIndexes.get(position);
w++;
targetIndexes.remove(position);
targetIndexes.trimToSize();
}
Arrays.sort(deletionPositions);
for (int j=deletionPositions.length-1; j>=0; j--) {
int position = deletionPositions[j];
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
if (testBedArray[stringIndex].length() <= position)
position -= testBedArray[stringIndex].length();
else {
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.deleteCharAt(position)
.toString();
break;
}
}
}
}
private void insertGivenCharacterOverCollection(char targetCharacter) {
int numberOfErrors = (int)(totalChrs * errorsInjectionPercentage/100.0);
insertGivenCharacterOverCollection(numberOfErrors, targetCharacter);
}
private void insertGivenCharacterPerString(char targetCharacter) {
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
int wordLength = testBedArray[stringIndex].length();
int howManyCharToInsert = generateBoundedRandom(wordLength);
for (int j=0; j<howManyCharToInsert; j++) {
int position = generateBoundedRandom(wordLength);
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.insert(position, targetCharacter)
.toString();
wordLength++;
}
}
}
private void del3_tmp(int numberOfDeletions, int stringIndex, char targetCharacter, ArrayList<Integer> targetIndexes) {
int[] tmp = new int[numberOfDeletions];
int w=0;
for (int j=0; j<numberOfDeletions; j++) {
int position = generateBoundedRandom(targetIndexes.size());
tmp[w]=targetIndexes.get(position);
w++;
targetIndexes.remove(position);
targetIndexes.trimToSize();
}
Arrays.sort(tmp);
for (int j=tmp.length-1; j>=0; j--) {
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.deleteCharAt(tmp[j])
.toString();
}
}
private void ins3_tmp(int numberOfInsertions, int stringIndex, char targetCharacter, int wordLength) {
for (int j=0; j<numberOfInsertions; j++) {
int position = generateBoundedRandom(wordLength);
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.insert(position, targetCharacter)
.toString();
wordLength++;
}
}
private void insertOrDeleteGivenCharacterPerString(char targetCharacter) {
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
String stringToScan = testBedArray[stringIndex];
ArrayList<Integer> targetIndexes = findTargetIndexes(stringToScan, targetCharacter);
int wordLength = testBedArray[stringIndex].length();
int numberOfErrors = (int)(wordLength * errorsInjectionPercentage/100.0);
int numberOfDeletions = generateBoundedRandom(targetIndexes.size());
int numberOfInsertions = numberOfErrors - numberOfDeletions;
if (numberOfErrors >= numberOfDeletions)
del3_tmp(numberOfDeletions, stringIndex, targetCharacter, targetIndexes);
ins3_tmp(numberOfInsertions, stringIndex, targetCharacter, wordLength);
}
}
private void del4_tmp(int numberOfDeletions, char targetCharacter) {
String stringToScan="";
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
stringToScan = stringToScan.concat(testBedArray[stringIndex]);
}
ArrayList<Integer> targetIndexes = findTargetIndexes(stringToScan, targetCharacter);
if (targetIndexes.size()>=1) {
deleteFromTargetIndexes(targetIndexes, numberOfDeletions);
}
else
logger.error("The given character is not in the testbed");
}
private void insertGivenCharacterOverCollection(int numberOfInsertions, char targetCharacter) {
for (int j = 0; j < numberOfInsertions; j++) {
int position = generateBoundedRandom(totalChrs);
for (int stringIndex = 0; stringIndex < testBedArray.length; stringIndex++) {
if (testBedArray[stringIndex].length() <= position)
position -= testBedArray[stringIndex].length();
else if (testBedArray[stringIndex].length() > position) {
testBedArray[stringIndex] =
new StringBuffer(testBedArray[stringIndex])
.insert(position, targetCharacter)
.toString();
break;
}
}
totalChrs++;
}
}
private void insertOrDeleteGivenCharacterOverCollection(char targetCharacter) {
int numberOfErrors = (int)(totalChrs * errorsInjectionPercentage/100.0);
int numberOfDeletions = (int)(Math.random() * numberOfErrors);
int numberOfInsertions = numberOfErrors-numberOfDeletions;
del4_tmp(numberOfDeletions, targetCharacter);
insertGivenCharacterOverCollection(numberOfInsertions, targetCharacter);
}
} | 16,389 | 31.977867 | 120 | java |
Janus | Janus-master/src/minerful/logmaker/errorinjector/params/ErrorInjectorCmdParameters.java | package minerful.logmaker.errorinjector.params;
import java.io.File;
import minerful.logmaker.errorinjector.ErrorInjector;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class ErrorInjectorCmdParameters extends ParamsManager {
private static final String ERROR_SPREADING_POLICY_PARAM_NAME = "eS";
private static final String ERROR_TYPE_PARAM_NAME = "eT";
private static final String ERROR_PERCENTAGE_PARAM_NAME = "eP";
private static final String TARGET_CHAR_PARAM_NAME = "eC";
public static final String OUTPUT_LOG_PATH_PARAM_NAME = "eLF";
public static final int ERROR_INJECTION_PERCENTAGE_DEFAULT = 0;
private ErrorInjector.SpreadingPolicy errorInjectionSpreadingPolicy;
private ErrorInjector.ErrorType errorType;
	private int errorsInjectionPercentage; /* percentage of the errors to inject */
private Character targetChar;
public File logFile;
public ErrorInjectorCmdParameters() {
super();
this.errorInjectionSpreadingPolicy = ErrorInjector.SpreadingPolicy.getDefault();
this.errorType = ErrorInjector.ErrorType.getDefault();
this.errorsInjectionPercentage = ERROR_INJECTION_PERCENTAGE_DEFAULT;
this.targetChar = null;
this.logFile = null;
}
public ErrorInjectorCmdParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public ErrorInjectorCmdParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.errorInjectionSpreadingPolicy =
ErrorInjector.SpreadingPolicy.valueOf(
line.getOptionValue(
ErrorInjectorCmdParameters.ERROR_SPREADING_POLICY_PARAM_NAME,
this.errorInjectionSpreadingPolicy.toString()
)
);
this.errorType =
ErrorInjector.ErrorType.valueOf(
line.getOptionValue(
ErrorInjectorCmdParameters.ERROR_TYPE_PARAM_NAME,
errorType.toString()
)
);
this.errorsInjectionPercentage = Integer.valueOf(
line.getOptionValue(ErrorInjectorCmdParameters.ERROR_PERCENTAGE_PARAM_NAME,
String.valueOf(this.errorsInjectionPercentage)));
if (line.hasOption(ErrorInjectorCmdParameters.TARGET_CHAR_PARAM_NAME)) {
this.targetChar = Character.valueOf(line.getOptionValue(ErrorInjectorCmdParameters.TARGET_CHAR_PARAM_NAME).charAt(0));
}
if (line.hasOption(ErrorInjectorCmdParameters.OUTPUT_LOG_PATH_PARAM_NAME)) {
this.logFile = new File(line.getOptionValue(ErrorInjectorCmdParameters.OUTPUT_LOG_PATH_PARAM_NAME));
}
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(ErrorInjectorCmdParameters.ERROR_SPREADING_POLICY_PARAM_NAME)
.hasArg().argName("policy")
.longOpt("err-spread-policy")
.desc("policy for the distribution of the errors. Possible values are:\n" +
"'" + ErrorInjector.SpreadingPolicy.collection + "'\n to spread the errors over the whole collection of traces [DEFAULT];\n" +
"'" + ErrorInjector.SpreadingPolicy.string + "'\n to inject the errors in every trace")
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(ErrorInjectorCmdParameters.ERROR_TYPE_PARAM_NAME)
.hasArg().argName("type")
.longOpt("err-type")
.desc("type of the errors to inject. Possible values are:\n" +
"'" + ErrorInjector.ErrorType.ins + "'\n suppression of the target task;\n" +
"'" + ErrorInjector.ErrorType.del + "'\n insertion of the target task;\n" +
"'" + ErrorInjector.ErrorType.insdel + "'\n mixed (suppressions or insertions, as decided by random) [DEFAULT]")
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(ErrorInjectorCmdParameters.ERROR_PERCENTAGE_PARAM_NAME)
.hasArg().argName("percent")
.longOpt("err-percentage")
.desc("percentage of the errors to be injected (from 0 to 100) [DEFAULT: 0]")
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(ErrorInjectorCmdParameters.TARGET_CHAR_PARAM_NAME)
.hasArg().argName("char")
.longOpt("err-target")
.desc("target task")
.type(Character.class)
.build()
);
options.addOption(
Option.builder(ErrorInjectorCmdParameters.OUTPUT_LOG_PATH_PARAM_NAME)
.hasArg().argName("file path")
.longOpt("err-out-log")
.desc("path to the file in which the error-injected log is stored")
.type(String.class)
.build()
);
return options;
}
public ErrorInjector.SpreadingPolicy getErrorInjectionSpreadingPolicy() {
return this.errorInjectionSpreadingPolicy;
}
public ErrorInjector.ErrorType getErrorType() {
return this.errorType;
}
public double getErrorsInjectionPercentage() {
return (errorsInjectionPercentage);
}
public Character getTargetChar() {
return targetChar;
}
public boolean isTargetCharDefined() {
return (this.targetChar != null);
}
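	/*
	 * Minimal usage sketch, assuming the standard commons-cli wiring of ParamsManager
	 * (the option values below are made-up examples):
	 *
	 *   String[] args = {"-eT", "insdel", "-eP", "10", "-eC", "a", "-eLF", "/tmp/err-log.txt"};
	 *   ErrorInjectorCmdParameters params = new ErrorInjectorCmdParameters(args);
	 *   // params.getErrorType() == ErrorInjector.ErrorType.insdel
	 *   // params.getErrorsInjectionPercentage() == 10.0
	 *   // params.getTargetChar() == Character.valueOf('a')
	 */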
} | 5,726 | 35.246835 | 132 | java |
Janus | Janus-master/src/minerful/logmaker/params/LogMakerParameters.java | package minerful.logmaker.params;
import java.io.File;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import minerful.params.ParamsManager;
import minerful.stringsmaker.params.StringTracesMakerCmdParameters;
import minerful.utils.MessagePrinter;
public class LogMakerParameters extends ParamsManager {
/**
* Output encoding for the generated event log.
* It can be either XES ({@link http://www.xes-standard.org/openxes/start}),
* MXML ({@link http://www.processmining.org/logs/mxml}), or
* string-based (events are turned into characters and traces into strings).
*/
public static enum Encoding {
/**
* XES ({@link http://www.xes-standard.org/openxes/start})
*/
xes, // default
/**
* MXML ({@link http://www.processmining.org/logs/mxml})
*/
mxml,
/**
* String-based (events are turned into characters and traces into strings)
*/
strings;
}
public static final String OUTPUT_FILE_PARAM_NAME = "oLF";
public static final String OUT_ENC_PARAM_NAME = "oLE";
public static final String SIZE_PARAM_NAME = "oLL";
public static final String MAX_LEN_PARAM_NAME = "oLM";
public static final String MIN_LEN_PARAM_NAME = "oLm";
public static final Long DEFAULT_SIZE = 100L;
public static final Integer DEFAULT_MIN_TRACE_LENGTH = 0;
public static final Integer DEFAULT_MAX_TRACE_LENGTH = 100;
public static final Encoding DEFAULT_OUTPUT_ENCODING = Encoding.xes;
/**
* Minimum number of events that have to be included in the generated traces.
*/
public Integer minEventsPerTrace; // mandatory assignment
/**
* Maximum number of events that have to be included in the generated traces.
*/
public Integer maxEventsPerTrace; // mandatory assignment
/**
* Number of traces in the log.
*/
public Long tracesInLog; // mandatory assignment
/**
* File in which the generated event log is going to be stored.
*/
public File outputLogFile;
/**
* Event log encoding (see {@link Encoding #Encoding}).
*/
public LogMakerParameters.Encoding outputEncoding;
public LogMakerParameters () {
this(DEFAULT_MIN_TRACE_LENGTH, DEFAULT_MAX_TRACE_LENGTH, DEFAULT_SIZE, null, DEFAULT_OUTPUT_ENCODING);
}
public LogMakerParameters(
Integer minEventsPerTrace, Integer maxEventsPerTrace, Long tracesInLog,
File outputLogFile, Encoding outputEncoding) {
super();
this.minEventsPerTrace = minEventsPerTrace;
this.maxEventsPerTrace = maxEventsPerTrace;
this.tracesInLog = tracesInLog;
this.outputLogFile = outputLogFile;
this.outputEncoding = outputEncoding;
}
public LogMakerParameters(
Integer minEventsPerTrace, Integer maxEventsPerTrace, Long tracesInLog,
Encoding outputEncoding) {
this(minEventsPerTrace, maxEventsPerTrace, tracesInLog, null, outputEncoding);
}
public LogMakerParameters(
Integer minEventsPerTrace, Integer maxEventsPerTrace, Long tracesInLog) {
this(minEventsPerTrace, maxEventsPerTrace, tracesInLog, null, null);
}
public LogMakerParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public LogMakerParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.minEventsPerTrace =
Integer.valueOf(
line.getOptionValue(MIN_LEN_PARAM_NAME, this.minEventsPerTrace.toString()));
this.maxEventsPerTrace =
Integer.valueOf(
line.getOptionValue(MAX_LEN_PARAM_NAME, this.maxEventsPerTrace.toString()));
this.tracesInLog =
Long.valueOf(line.getOptionValue(SIZE_PARAM_NAME, this.tracesInLog.toString()));
this.outputEncoding = Encoding.valueOf(
line.getOptionValue(OUT_ENC_PARAM_NAME, this.outputEncoding.toString())
);
this.outputLogFile = openOutputFile(line, OUTPUT_FILE_PARAM_NAME);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(MIN_LEN_PARAM_NAME)
.hasArg().argName("min-length")
.longOpt("minlen")
.desc("minimum length of the generated traces. It must be greater than or equal to 0"
+ printDefault(DEFAULT_MIN_TRACE_LENGTH))
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(MAX_LEN_PARAM_NAME)
.hasArg().argName("max-length")
.longOpt("maxlen")
.desc("maximum length of the generated traces. It must be greater than or equal to 0"
+ printDefault(DEFAULT_MAX_TRACE_LENGTH))
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(SIZE_PARAM_NAME)
.hasArg().argName("number of traces")
.longOpt("size")
.desc("number of traces to simulate"
+ printDefault(DEFAULT_SIZE))
.type(Long.class)
.build()
);
options.addOption(
Option.builder(OUT_ENC_PARAM_NAME)
.hasArg().argName("language")
.longOpt("out-log-encoding")
.desc("encoding language for the output log " + printValues(LogMakerParameters.Encoding.values())
+ printDefault(fromEnumValueToString(DEFAULT_OUTPUT_ENCODING)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(OUTPUT_FILE_PARAM_NAME)
.hasArg().argName("file path")
.longOpt("out-log-file")
.desc("path of the file in which the log should be written")
.type(String.class)
.build()
);
return options;
}
/**
* Checks that the assigned parameters are valid.
* @return <code>null</code> in case of valid parameters. A string describing the assignment errors otherwise.
*/
public String checkValidity() {
StringBuilder checkFailures = new StringBuilder();
// Mandatory assignments check
if (minEventsPerTrace == null)
checkFailures.append("Minimum number of events per trace unspecified\n");
if (maxEventsPerTrace == null)
checkFailures.append("Maximum number of events per trace unspecified\n");
if (tracesInLog == null) {
checkFailures.append("Number of traces in log unspecified\n");
}
if (checkFailures.length() > 0)
return checkFailures.toString();
// Correct assignments check
if (minEventsPerTrace < 0)
checkFailures.append("Negative minimum number of events per trace specified\n");
if (maxEventsPerTrace < 0)
checkFailures.append("Negative maximum number of events per trace specified\n");
if (minEventsPerTrace > maxEventsPerTrace)
checkFailures.append("Maximum number of events per trace are specified to be less than the minimum\n");
if (tracesInLog < 0)
checkFailures.append("Negative number of traces specified\n");
if (outputLogFile != null && outputLogFile.isDirectory()) {
checkFailures.append("Directory specified in place of a file to save the log\n");
}
if (checkFailures.length() > 0)
return checkFailures.toString();
return null;
}
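	/*
	 * Minimal usage sketch (the file path and the figures are made-up examples):
	 *
	 *   LogMakerParameters logMakerParams =
	 *       new LogMakerParameters(5, 30, 1000L, new File("/tmp/simulated-log.xes"), Encoding.xes);
	 *   String errors = logMakerParams.checkValidity();
	 *   // errors == null when the assignment is consistent (e.g., min <= max, non-negative sizes)
	 */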
} | 7,558 | 32.745536 | 111 | java |
Janus | Janus-master/src/minerful/logmaker/params/SortingCriterion.java | package minerful.logmaker.params;
/**
* The criterion according to which traces should be sorted in the event log
*/
public enum SortingCriterion {
/**
* Sort by the timestamp of the first event in the trace, ascending
*/
FIRST_EVENT_ASC,
/**
* Sort by the timestamp of the last event in the trace, ascending
*/
LAST_EVENT_ASC,
/**
* Sort by the length of the trace (in the number of events), ascending
*/
TRACE_LENGTH_ASC,
/**
* Sort by the length of the trace (in the number of events), descending
*/
TRACE_LENGTH_DESC
} | 549 | 22.913043 | 76 | java |
Janus | Janus-master/src/minerful/logmaker/params/XesLogSorterParameters.java | package minerful.logmaker.params;
import java.io.File;
import java.util.ArrayList;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import minerful.params.ParamsManager;
public class XesLogSorterParameters extends ParamsManager {
public static final String TRACES_SORTING_CRITERIA_PARAM_NAME = "trSort";
public static final String INPUT_XES_PARAM_NAME = "trXESin";
public static final String OUTPUT_XES_PARAM_NAME = "trXESout";
public static final SortingCriterion[] DEFAULT_TRACES_SORTING_CRITERIA =
new SortingCriterion[]{ SortingCriterion.FIRST_EVENT_ASC };
/**
* The criteria according to which traces should be sorted in the event log.
* The order in which they are given impacts the respective priority.
*/
public SortingCriterion[] tracesSortingCriteria; // mandatory assignment
/**
* File in which the generated XES ({@link http://www.xes-standard.org/openxes/start}) event log is going to be stored.
*/
public File outputXesFile; // mandatory assignment
/**
* File from which the original XES ({@link http://www.xes-standard.org/openxes/start}) is read.
*/
public File inputXesFile; // mandatory assignment
public XesLogSorterParameters () {
this(DEFAULT_TRACES_SORTING_CRITERIA,null,null);
}
public XesLogSorterParameters(SortingCriterion[] tracesSortingCriteria, File outputXesFile, File inputXesFile) {
super();
this.tracesSortingCriteria = tracesSortingCriteria;
this.outputXesFile = outputXesFile;
this.inputXesFile = inputXesFile;
}
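	/*
	 * Minimal programmatic usage sketch (the file paths are placeholders):
	 *
	 *   XesLogSorterParameters params = new XesLogSorterParameters(
	 *       new SortingCriterion[]{ SortingCriterion.LAST_EVENT_ASC, SortingCriterion.TRACE_LENGTH_DESC },
	 *       new File("/tmp/sorted.xes"),    // output XES log
	 *       new File("/tmp/original.xes")); // input XES log
	 */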
public XesLogSorterParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public XesLogSorterParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
		this.updateSortingCriteria(line.getOptionValue(TRACES_SORTING_CRITERIA_PARAM_NAME));
this.inputXesFile = openInputFile(line, INPUT_XES_PARAM_NAME);
this.outputXesFile = openOutputFile(line, OUTPUT_XES_PARAM_NAME);
}
	private void updateSortingCriteria(String paramString) {
String[] tokens = tokenise(paramString);
if (tokens == null)
return;
ArrayList<SortingCriterion> listOfCriteria = new ArrayList<SortingCriterion>(tokens.length);
SortingCriterion criterion = null;
for (String token : tokens) {
token = fromStringToEnumValue(token);
			try {
				criterion = SortingCriterion.valueOf(token);
			} catch (Exception e) {
				System.err.println("Invalid option for " + TRACES_SORTING_CRITERIA_PARAM_NAME + ": " + token + " is going to be ignored.");
				continue;	// actually skip the invalid token, rather than re-adding the previous (or null) criterion
			}
			listOfCriteria.add(criterion);
}
if (listOfCriteria.size() > 0) {
this.tracesSortingCriteria = listOfCriteria.toArray(new SortingCriterion[0]);
} else {
System.err.println("No valid option for " + TRACES_SORTING_CRITERIA_PARAM_NAME + ". Using default value.");
}
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(TRACES_SORTING_CRITERIA_PARAM_NAME)
.hasArg().argName("criteria")
.longOpt("traces-sorting-criteria")
.desc("The criteria according to which traces should be sorted in the event log.\n" +
"The order in which they are given impacts the respective priority. It can be a " + ARRAY_TOKENISER_SEPARATOR + "-separated list of the following: " + printValues(SortingCriterion.values())
+ printDefault(fromEnumValuesToTokenJoinedString(DEFAULT_TRACES_SORTING_CRITERIA)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(OUTPUT_XES_PARAM_NAME)
.hasArg().argName("file path")
.longOpt("out-xes-log")
.desc("path of the file in which the XES log should be written.")
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_XES_PARAM_NAME)
.hasArg().argName("file path")
.longOpt("in-xes-log")
.desc("path of the file from which the XES log should be read.")
.type(String.class)
.build()
);
return options;
}
} | 4,374 | 35.157025 | 195 | java |
Janus | Janus-master/src/minerful/logparser/AbstractLogEventClassifier.java | package minerful.logparser;
public abstract class AbstractLogEventClassifier implements LogEventClassifier {
protected final LogEventClassifier.ClassificationType eventClassificationType;
public AbstractLogEventClassifier(LogEventClassifier.ClassificationType eventClassificationType) {
this.eventClassificationType = eventClassificationType;
}
@Override
public LogEventClassifier.ClassificationType getEventClassificationType() {
return eventClassificationType;
}
} | 480 | 31.066667 | 99 | java |
Janus | Janus-master/src/minerful/logparser/AbstractLogParser.java | package minerful.logparser;
import java.io.File;
import java.util.*;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskCharArchive;
import minerful.io.encdec.TaskCharEncoderDecoder;
import minerful.utils.MessagePrinter;
public abstract class AbstractLogParser implements LogParser {
public static MessagePrinter logger = MessagePrinter.getInstance(AbstractLogParser.class);
private int minimumTraceLength = UNDEFINED_MINIMUM_LENGTH;
private int maximumTraceLength = UNDEFINED_MAXIMUM_LENGTH;
	private int numberOfEvents = 0;	// total number of events over the navigable traces, accumulated in updateLogStats()
protected TaskCharEncoderDecoder taChaEncoDeco;
protected TaskCharArchive taskCharArchive;
protected Integer startingTrace;
protected Integer subLogLength;
protected List<LogTraceParser> traceParsers;
/**
* Differently than {@link #traceParsers this.traceParser}, this attribute
* does not consider all traces but just those that we want to make visible
* to the miner.
* This decision will be affected by
* {@link #startingTrace this.startingTrace}
* and
* {@link #subLogLength this.subLogLength}.
*/
protected List<LogTraceParser> navigableTraceParsers;
protected AbstractLogParser(TaskCharEncoderDecoder taChaEncoDeco,
TaskCharArchive taskCharArchive,
List<LogTraceParser> traceParsers,
Integer startingTrace,
Integer subLogLength) {
this.taChaEncoDeco = taChaEncoDeco;
this.taskCharArchive = taskCharArchive;
this.traceParsers = traceParsers;
init(startingTrace, subLogLength);
this.postInit();
}
protected void init(Integer startingTrace, Integer subLogLength) {
if (subLogLength < 0) {
throw new IllegalArgumentException("The length of the sub-log should be a positive integer!");
}
if (startingTrace < 0) {
throw new IllegalArgumentException("The initial trace number should be a positive integer!");
}
this.startingTrace = startingTrace;
this.subLogLength = subLogLength;
}
protected void postInit() {
this.setUpNavigableTraceParsers();
this.updateLogStats();
}
protected void updateLogStats() {
if (this.navigableTraceParsers == null)
throw new IllegalStateException("You should invoke AbstractLogParser.setUpNavigableTraceParsers() before AbstractLogParser.updateLogStats()");
for (LogTraceParser logTraceParser : this.navigableTraceParsers) {
updateMaximumTraceLength(logTraceParser.length());
updateMinimumTraceLength(logTraceParser.length());
updateNumberOfEvents(logTraceParser.length());
}
}
protected void setUpNavigableTraceParsers() {
if (this.startingTrace >= this.wholeLength()) {
logger.warn("The given starting trace number (" + this.startingTrace + ") is higher than the size of the event log (" + this.wholeLength() + "). Restoring it to default (0)");
this.startingTrace = 0;
}
if (this.subLogLength > this.wholeLength() - this.startingTrace) {
logger.warn("The given length of the sub-log (" + this.subLogLength + ") is too high. Changing its value to the maximum possible value");
this.subLogLength = this.wholeLength() - this.startingTrace;
}
if (this.subLogLength > 0 || this.startingTrace > 0) {
int
i = 0,
actualLength = Math.min(
this.subLogLength,
this.wholeLength() - this.startingTrace);
this.navigableTraceParsers =
new ArrayList<LogTraceParser>(actualLength);
Iterator<LogTraceParser> parsers =
this.traceParsers.listIterator(this.startingTrace);
while (parsers.hasNext() && i < actualLength) {
this.navigableTraceParsers.add(i++, parsers.next());
}
} else {
this.navigableTraceParsers = this.traceParsers;
}
}
protected AbstractLogParser() {
}
protected abstract Collection<AbstractTaskClass> parseLog(File logFile) throws Exception;
protected void updateNumberOfEvents(int numberOfEvents) {
this.numberOfEvents += numberOfEvents;
}
protected void updateMaximumTraceLength(int numberOfEvents) {
if (numberOfEvents > this.maximumTraceLength) {
this.maximumTraceLength = numberOfEvents;
}
}
protected void updateMinimumTraceLength(int numberOfEvents) {
if (numberOfEvents < this.minimumTraceLength) {
this.minimumTraceLength = numberOfEvents;
}
}
@Override
public TaskCharEncoderDecoder getEventEncoderDecoder() {
return this.taChaEncoDeco;
}
@Override
public int minimumTraceLength() {
return this.minimumTraceLength;
}
@Override
public int maximumTraceLength() {
return this.maximumTraceLength;
}
@Override
public int numberOfEvents() {
return this.numberOfEvents;
}
@Override
public Iterator<LogTraceParser> traceIterator() {
return this.navigableTraceParsers.listIterator(0);
}
@Override
public int length() {
return this.navigableTraceParsers.size();
}
@Override
public int wholeLength() {
return this.traceParsers.size();
}
protected void archiveTaskChars(Collection<AbstractTaskClass> classes, TaskCharArchive taskCharArchive) {
if (taskCharArchive == null) {
this.taChaEncoDeco.encode(classes.toArray(new AbstractTaskClass[classes.size()]));
this.taskCharArchive = new TaskCharArchive(this.taChaEncoDeco.getTranslationMap());
} else { // to be sure that the log encoding matches an existing encoding
this.taChaEncoDeco.encode(taskCharArchive.getTaskChars()); //place first the already coded characters
this.taChaEncoDeco.encode(classes.toArray(new AbstractTaskClass[classes.size()]));
this.taskCharArchive = new TaskCharArchive(this.taChaEncoDeco.getTranslationMap());
}
}
@Override
public TaskCharArchive getTaskCharArchive() {
return this.taskCharArchive;
}
protected abstract AbstractLogParser makeACopy(
TaskCharEncoderDecoder taChaEncoDeco,
TaskCharArchive taskCharArchive,
List<LogTraceParser> navigableTraceParsers,
Integer startingTrace,
Integer subLogLength);
@Override
public List<LogParser> split(Integer parts) {
if (parts <= 0)
throw new IllegalArgumentException("The log cannot be split in " + parts + " parts. Only positive integer values are allowed");
int tracesPerSlice = this.navigableTraceParsers.size() / parts;
List<LogParser> logParsers = new ArrayList<LogParser>(parts);
List<LogTraceParser> auxTraceParsers = new ArrayList<LogTraceParser>(tracesPerSlice);
List<List<LogTraceParser>> portions = new ArrayList<List<LogTraceParser>>();
int
traceRunner = 0,
traceCounter = 0,
traceParsersListCounter = 0;
/*
* If you read this line, and feel the urge to curse me, I cannot blame you.
		 * Yours, Claudio Di Ciccio
*/
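		// Three phases follow: (i) create one empty portion per requested part; (ii) fill each
		// portion with exactly tracesPerSlice traces; (iii) append the remaining traces (when the
		// division is not exact) to the last portion.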
for (traceParsersListCounter = parts; traceParsersListCounter > 0; traceParsersListCounter--) {
portions.add(new ArrayList<LogTraceParser>(tracesPerSlice));
}
auxTraceParsers = portions.get(traceParsersListCounter);
for (; traceRunner < tracesPerSlice * parts; traceRunner++, traceCounter++) {
if (traceCounter >= tracesPerSlice) {
traceCounter = 0;
traceParsersListCounter++;
auxTraceParsers = portions.get(traceParsersListCounter);
}
auxTraceParsers.add(navigableTraceParsers.get(traceRunner));
}
for (; traceRunner < this.navigableTraceParsers.size(); traceRunner++) {
auxTraceParsers.add(navigableTraceParsers.get(traceRunner));
}
for (List<LogTraceParser> portion : portions) {
logParsers.add(
this.makeACopy(
taChaEncoDeco,
taskCharArchive,
portion,
0,
0)
);
}
return logParsers;
}
@Override
public LogParser takeASlice(Integer from, Integer length) {
return this.makeACopy(taChaEncoDeco, taskCharArchive, traceParsers, from, length);
}
@Override
public void excludeTasksByName(Collection<String> tasksToExcludeFromResult) {
Collection<AbstractTaskClass> taskClassesToExclude = getEventEncoderDecoder().excludeThese(tasksToExcludeFromResult);
this.taskCharArchive.removeAllByClass(taskClassesToExclude);
}
/**
* Shuffle randomly the iterable traces.
*
* Beware, side effect.
*/
@Override
public void shuffleTraces(){
Collections.shuffle(this.traceParsers);
}
} | 9,393 | 35.984252 | 187 | java |
Janus | Janus-master/src/minerful/logparser/AbstractTraceParser.java | package minerful.logparser;
public abstract class AbstractTraceParser implements LogTraceParser {
protected boolean parsing;
protected SenseOfReading senseOfReading = SenseOfReading.ONWARDS;
@Override
public boolean isParsing() {
return parsing;
}
@Override
public SenseOfReading reverse() {
this.senseOfReading = this.senseOfReading.switchSenseOfReading();
return this.senseOfReading;
}
@Override
public SenseOfReading getSenseOfReading() {
return senseOfReading;
}
} | 492 | 20.434783 | 69 | java |
Janus | Janus-master/src/minerful/logparser/CharTaskClass.java | package minerful.logparser;
import javax.xml.bind.annotation.XmlType;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskClass;
import minerful.io.encdec.TaskCharEncoderDecoder;
@XmlType
public class CharTaskClass extends AbstractTaskClass implements TaskClass {
public final Character charClass;
public CharTaskClass(Character charClass) {
this.charClass = charClass;
}
@Override
public int compareTo(TaskClass o) {
if (o instanceof CharTaskClass)
return this.charClass.compareTo(((CharTaskClass) o).charClass);
else
return super.compareTo(o);
}
@Override
public String getName() {
return charClass.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((charClass == null) ? 0 : charClass.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
CharTaskClass other = (CharTaskClass) obj;
if (charClass == null) {
if (other.charClass != null)
return false;
} else if (!charClass.equals(other.charClass))
return false;
return true;
}
} | 1,231 | 21.4 | 75 | java |
Janus | Janus-master/src/minerful/logparser/LogEventClassifier.java | package minerful.logparser;
import java.util.Collection;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskClass;
public interface LogEventClassifier {
public enum ClassificationType {
NAME("name"),
LOG_SPECIFIED("logspec");
public final String type;
private ClassificationType(String type) {
this.type = type;
}
}
ClassificationType getEventClassificationType();
Collection<AbstractTaskClass> getTaskClasses();
} | 460 | 19.043478 | 49 | java |
Janus | Janus-master/src/minerful/logparser/LogEventParser.java | package minerful.logparser;
import minerful.concept.Event;
public interface LogEventParser {
public Character evtIdentifier();
public Event getEvent();
} | 161 | 13.727273 | 34 | java |
Janus | Janus-master/src/minerful/logparser/LogParser.java | package minerful.logparser;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import minerful.concept.TaskCharArchive;
import minerful.io.encdec.TaskCharEncoderDecoder;
public interface LogParser {
int UNDEFINED_MAXIMUM_LENGTH = -1;
int UNDEFINED_MINIMUM_LENGTH = Integer.MAX_VALUE;
int length();
int minimumTraceLength();
int maximumTraceLength();
Iterator<LogTraceParser> traceIterator();
TaskCharEncoderDecoder getEventEncoderDecoder();
LogEventClassifier getEventClassifier();
int numberOfEvents();
TaskCharArchive getTaskCharArchive();
List<LogParser> split(Integer parts);
void excludeTasksByName(Collection<String> activitiesToExcludeFromResult);
LogParser takeASlice(Integer from, Integer length);
int wholeLength();
void shuffleTraces();
} | 800 | 26.62069 | 75 | java |
Janus | Janus-master/src/minerful/logparser/LogTraceParser.java | package minerful.logparser;
public interface LogTraceParser {
enum SenseOfReading {
ONWARDS,
BACKWARDS;
public SenseOfReading switchSenseOfReading() {
return (this.equals(ONWARDS) ? BACKWARDS : ONWARDS);
}
}
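	/*
	 * A sketched way to drive an implementation (the exact iteration contract is up to the
	 * implementing classes, e.g. string- or XES-based trace parsers):
	 *
	 *   traceParser.init();
	 *   while (!traceParser.isParsingOver()) {
	 *       Character encodedEvent = traceParser.parseSubsequentAndEncode();
	 *       // ... consume encodedEvent ...
	 *   }
	 */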
SenseOfReading reverse();
SenseOfReading getSenseOfReading();
int length();
LogParser getLogParser();
boolean isParsing();
LogEventParser parseSubsequent();
Character parseSubsequentAndEncode();
boolean isParsingOver();
boolean stepToSubsequent();
void init();
String encodeTrace();
String printStringTrace();
String getName();
} | 568 | 20.884615 | 55 | java |
Janus | Janus-master/src/minerful/logparser/StringEventClassifier.java | package minerful.logparser;
import java.util.Collection;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskClass;
public class StringEventClassifier extends AbstractLogEventClassifier implements LogEventClassifier {
public StringEventClassifier(ClassificationType eventClassificationType) {
super(eventClassificationType);
}
private Set<AbstractTaskClass> classes = new TreeSet<AbstractTaskClass>();
public AbstractTaskClass classify(Character chr) {
CharTaskClass chaTaCla = new CharTaskClass(chr);
this.classes.add(chaTaCla);
return chaTaCla;
}
public Collection<AbstractTaskClass> classify(String trace) {
for (Character chr : trace.toCharArray()) {
this.classes.add(new CharTaskClass(chr));
}
return classes;
}
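	// Example: classify("abca") registers the task classes of 'a', 'b', and 'c';
	// duplicates are absorbed by the backing TreeSet.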
@Override
public Collection<AbstractTaskClass> getTaskClasses() {
return this.classes;
}
}
| 914 | 24.416667 | 101 | java |
Janus | Janus-master/src/minerful/logparser/StringEventParser.java | package minerful.logparser;
import minerful.concept.AbstractTaskClass;
import minerful.concept.Event;
public class StringEventParser implements LogEventParser {
private StringTraceParser strTraceParser;
public final Character strEvent;
public StringEventParser(StringTraceParser stringTraceParser, Character strEvent) {
this.strTraceParser = stringTraceParser;
this.strEvent = strEvent;
}
@Override
public Character evtIdentifier() {
AbstractTaskClass logEventClass = this.strTraceParser.strLogParser.strEventClassifier.classify(strEvent);
return this.strTraceParser.strLogParser.taChaEncoDeco.encode(logEventClass);
}
@Override
public Event getEvent() {
AbstractTaskClass logEventClass = this.strTraceParser.strLogParser.strEventClassifier.classify(strEvent);
return new Event(logEventClass);
}
}
| 826 | 28.535714 | 107 | java |