repo
stringlengths 1
191
⌀ | file
stringlengths 23
351
| code
stringlengths 0
5.32M
| file_length
int64 0
5.32M
| avg_line_length
float64 0
2.9k
| max_line_length
int64 0
288k
| extension_type
stringclasses 1
value |
---|---|---|---|---|---|---|
Janus | Janus-master/src/minerful/logparser/StringLogParser.java | package minerful.logparser;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskCharArchive;
import minerful.io.encdec.TaskCharEncoderDecoder;
public class StringLogParser extends AbstractLogParser implements LogParser {
StringEventClassifier strEventClassifier;
protected StringLogParser(TaskCharEncoderDecoder taChaEncoDeco,
TaskCharArchive taskCharArchive, List<LogTraceParser> traceParsers,
StringEventClassifier strEventClassifier,
Integer startingTrace,
Integer subLogLength) {
super(taChaEncoDeco, taskCharArchive, traceParsers, startingTrace, subLogLength);
this.strEventClassifier = strEventClassifier;
}
public StringLogParser(String[] strings,
LogEventClassifier.ClassificationType evtClassType) {
this(strings, evtClassType, 0, 0);
}
public StringLogParser(String[] strings,
LogEventClassifier.ClassificationType evtClassType,
Integer startingTrace,
Integer subLogLength) {
this.init(evtClassType, startingTrace, subLogLength);
super.archiveTaskChars(this.parseLog(strings), null);
super.postInit();
}
public StringLogParser(File stringsLogFile,
LogEventClassifier.ClassificationType evtClassType) throws Exception {
this(stringsLogFile, evtClassType, 0, 0, null);
}
public StringLogParser(File stringsLogFile,
LogEventClassifier.ClassificationType evtClassType,
TaskCharArchive taskCharArchive) throws Exception {
this(stringsLogFile, evtClassType, 0, 0, taskCharArchive);
}
public StringLogParser(File stringsLogFile,
LogEventClassifier.ClassificationType evtClassType,
Integer startingTrace,
Integer subLogLength,
TaskCharArchive taskCharArchive) throws Exception {
if (!stringsLogFile.canRead()) {
throw new IllegalArgumentException("Unparsable log file: " + stringsLogFile.getAbsolutePath());
}
this.init(evtClassType, startingTrace, subLogLength);
super.archiveTaskChars(this.parseLog(stringsLogFile), taskCharArchive);
super.postInit();
}
private void init(
LogEventClassifier.ClassificationType evtClassType,
Integer startingTrace,
Integer subLogLength) {
this.taChaEncoDeco = new TaskCharEncoderDecoder();
this.strEventClassifier = new StringEventClassifier(evtClassType);
this.traceParsers = new ArrayList<LogTraceParser>();
super.init(startingTrace, subLogLength);
}
protected Collection<AbstractTaskClass> parseLog(String[] strings) {
	// Parses an in-memory log: each string is one trace, each character one event.
	// Returns the set of task classes discovered by the event classifier.
	for (String strLine : strings) {
		strLine = strLine.trim();
		// Register a trace parser for this trace, mirroring parseLog(File).
		// NOTE(review): the original only updated the classes, so string-array
		// logs produced no trace parsers — presumed an oversight; confirm callers.
		this.updateTraceParsers(strLine);
		this.updateClasses(strLine);
	}
	return this.strEventClassifier.getTaskClasses();
}
private void updateTraceParsers(String strLine) {
this.traceParsers.add(new StringTraceParser(strLine, this));
}
private void updateClasses(String strLine) {
	// Feed every character of the trace to the classifier
	// (one character encodes one event class).
	int traceLength = strLine.length();
	for (int pos = 0; pos < traceLength; pos++) {
		this.strEventClassifier.classify(strLine.charAt(pos));
	}
}
@Override
protected Collection<AbstractTaskClass> parseLog(File stringsLogFile) throws Exception {
	/*
	 * Reads the log file line by line: each line is a trace, each character an
	 * event. try-with-resources guarantees the reader is closed even when
	 * readLine() throws (the original leaked the stream on exception). The
	 * redundant DataInputStream wrapper was dropped: it added nothing on top of
	 * the FileInputStream.
	 */
	try (BufferedReader br = new BufferedReader(
			new InputStreamReader(new FileInputStream(stringsLogFile)))) {
		String strLine = br.readLine();
		while (strLine != null) {
			strLine = strLine.trim();
			updateTraceParsers(strLine);
			updateClasses(strLine);
			strLine = br.readLine();
		}
	}
	return this.strEventClassifier.getTaskClasses();
}
@Override
public LogEventClassifier getEventClassifier() {
return this.strEventClassifier;
}
@Override
protected AbstractLogParser makeACopy(
TaskCharEncoderDecoder taChaEncoDeco,
TaskCharArchive taskCharArchive,
List<LogTraceParser> traceParsers,
Integer startingTrace,
Integer subLogLength) {
return new StringLogParser(taChaEncoDeco, taskCharArchive, traceParsers, strEventClassifier, startingTrace, subLogLength);
}
} | 4,836 | 35.643939 | 130 | java |
Janus | Janus-master/src/minerful/logparser/StringTaskClass.java | package minerful.logparser;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskClass;
import minerful.io.encdec.TaskCharEncoderDecoder;
public class StringTaskClass extends AbstractTaskClass implements TaskClass {
public static StringTaskClass WILD_CARD = new StringTaskClass(TaskCharEncoderDecoder.WILDCARD_STRING);
protected StringTaskClass() {
super();
}
public StringTaskClass(String classString) {
this.className = classString;
}
@Override
public int compareTo(TaskClass o) {
	// Order string-based task classes by their class name;
	// anything else falls back to the generic superclass ordering.
	if (!(o instanceof StringTaskClass)) {
		return super.compareTo(o);
	}
	StringTaskClass other = (StringTaskClass) o;
	return this.className.compareTo(other.className);
}
@Override
public String getName() {
return className;
}
@Override
public int hashCode() {
	// Standard 31-based hash over the single identifying field (className),
	// consistent with equals(); null name hashes like the empty contribution.
	final int prime = 31;
	int classNameHash = (className == null) ? 0 : className.hashCode();
	return prime * 1 + classNameHash;
}
@Override
public boolean equals(Object obj) {
	// Equal iff: same reference, or superclass state matches, exact same
	// runtime class, and class names are equal (both possibly null).
	if (this == obj) {
		return true;
	}
	if (!super.equals(obj) || getClass() != obj.getClass()) {
		return false;
	}
	StringTaskClass other = (StringTaskClass) obj;
	return (className == null)
			? other.className == null
			: className.equals(other.className);
}
} | 1,296 | 22.160714 | 103 | java |
Janus | Janus-master/src/minerful/logparser/StringTraceParser.java | package minerful.logparser;
public class StringTraceParser extends AbstractTraceParser implements LogTraceParser {
private String strTrace;
StringLogParser strLogParser;
private int currentIndex;
private StringEventParser strEventParser;
public StringTraceParser(String strTrace, StringLogParser strLogParser) {
this.strTrace = strTrace;
this.strLogParser = strLogParser;
this.parsing = true;
this.init();
}
@Override
public LogParser getLogParser() {
return this.strLogParser;
}
@Override
public Character parseSubsequentAndEncode() {
	// Advance to the next event in the current sense of reading and return its
	// encoded identifier; null once the trace is exhausted.
	if (!stepToSubsequent()) {
		return null;
	}
	return strEventParser.evtIdentifier();
}
@Override
public LogEventParser parseSubsequent() {
if (stepToSubsequent()) {
return strEventParser;
}
return null;
}
@Override
public boolean isParsingOver() {
	// True when the cursor has reached the boundary of the trace string in the
	// current sense of reading.
	// NOTE(review): because && binds tighter than ||, this evaluates as
	// (isParsing() && BACKWARDS-at-start) || ONWARDS-at-end — the isParsing()
	// guard does NOT cover the ONWARDS clause. The same shape appears in
	// XesTraceParser.isParsingOver(); confirm whether the parentheses were
	// meant to enclose the whole disjunction before changing it.
	return (
			this.isParsing() &&
			(this.senseOfReading.equals(SenseOfReading.BACKWARDS) && this.currentIndex <= 0)
			||
			(this.senseOfReading.equals(SenseOfReading.ONWARDS) && this.currentIndex >= this.strTrace.length() -1));
}
@Override
public boolean stepToSubsequent() {
	// Advances the cursor by one event in the current sense of reading and
	// materialises a StringEventParser for the character found there. When the
	// trace is exhausted, the event parser is dropped and parsing is flagged
	// as finished. Returns whether parsing is still in progress.
	if (!isParsingOver()) {
		switch(this.senseOfReading) {
		case ONWARDS:
			// Move right: currentIndex starts at -1 (see init()), so the first
			// step lands on index 0.
			this.currentIndex++;
			this.strEventParser = new StringEventParser(this, this.strTrace.charAt(currentIndex));
			break;
		case BACKWARDS:
			// Move left: currentIndex starts at length() (see init()).
			this.currentIndex--;
			this.strEventParser = new StringEventParser(this, this.strTrace.charAt(currentIndex));
			break;
		default:
			break;
		}
	} else {
		this.strEventParser = null;
		this.parsing = false;
	}
	return isParsing();
}
@Override
public void init() {
switch (this.getSenseOfReading()) {
case BACKWARDS:
this.currentIndex = strTrace.length();
break;
case ONWARDS:
default:
this.currentIndex = -1;
break;
}
this.parsing = true;
}
@Override
public int length() {
return strTrace.length();
}
@Override
public String encodeTrace() {
return strTrace;
}
@Override
public String printStringTrace() {
return strTrace;
}
@Override
public String getName() {
return strTrace;
}
} | 2,139 | 19.776699 | 107 | java |
Janus | Janus-master/src/minerful/logparser/TraceParser.java | package minerful.logparser;
public interface TraceParser {
enum SenseOfReading {
ONWARDS,
BACKWARDS
}
SenseOfReading reverse();
int length();
LogEventParser next();
} | 180 | 11.066667 | 30 | java |
Janus | Janus-master/src/minerful/logparser/XesEventClassifier.java | package minerful.logparser;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.TreeSet;
import minerful.concept.AbstractTaskClass;
import org.deckfour.xes.classification.XEventClasses;
import org.deckfour.xes.classification.XEventClassifier;
import org.deckfour.xes.classification.XEventNameClassifier;
import org.deckfour.xes.extension.std.XConceptExtension;
import org.deckfour.xes.model.XAttributeLiteral;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
public class XesEventClassifier extends AbstractLogEventClassifier implements LogEventClassifier {
private XEventClassifier DEFAULT_XES_EVENT_CLASSIFIER = new XEventNameClassifier();
private XEventClassifier xesNativeEventClassifier;
private XLog xLog;
private XEventClasses xEvtClasses;
public XesEventClassifier(LogEventClassifier.ClassificationType eventClassificationType) {
super(eventClassificationType);
this.xesNativeEventClassifier = (eventClassificationType.equals(ClassificationType.LOG_SPECIFIED) ? null : DEFAULT_XES_EVENT_CLASSIFIER);
this.xEvtClasses = ( this.xesNativeEventClassifier == null ? null : new XEventClasses(this.xesNativeEventClassifier) );
}
public XesTaskClass classify(XEvent xesNativeEvent) {
	// Wraps the XES event class of the given event into a MINERful task class.
	// xEvtClasses is only available once a log has been parsed (or when the
	// classification type fixed a default classifier at construction), hence
	// the guard below.
	if (this.xEvtClasses == null)
		throw new IllegalStateException("No classes for events available, until at least an instance of XLog has been parsed");
	return new XesTaskClass(xEvtClasses.getClassOf(xesNativeEvent));
}
/**
* TODO It should not ignore any other classifier but the first one!
* @param logSpecifiedEventClassifiers
* @param xLog
* @return
*/
public boolean addXesClassifiers(List<XEventClassifier> logSpecifiedEventClassifiers, XLog xLog) {
	// Adopts a log-specified classifier when none was fixed at construction
	// time, then (re)derives the event classes for the given log.
	boolean newClassifierConsidered = false;
	if (this.xesNativeEventClassifier == null) {
		// The loop stops as soon as xesNativeEventClassifier becomes non-null,
		// so effectively only the FIRST classifier in the list is taken
		// (acknowledged by the TODO in the Javadoc above this method).
		for(int i = 0; i < logSpecifiedEventClassifiers.size() && this.xesNativeEventClassifier == null; i++) {
			this.xesNativeEventClassifier = logSpecifiedEventClassifiers.get(i);
		}
		// NOTE(review): if the list is empty the classifier stays null, yet the
		// flag is still set and deriveEventClasses below runs with a null
		// classifier — confirm callers never pass an empty classifier list.
		newClassifierConsidered = true;
	}
	if (newClassifierConsidered || this.xEvtClasses.size() == 0) {
		this.xEvtClasses = XEventClasses.deriveEventClasses(xesNativeEventClassifier, xLog);
	}
	return newClassifierConsidered;
}
@Override
public Collection<AbstractTaskClass> getTaskClasses() {
	// Exposes every derived XES event class as a MINERful task class.
	// Fails fast when no log has been parsed yet (xEvtClasses still null).
	if (this.xEvtClasses == null)
		throw new IllegalStateException("No classes for events available, until at least an instance of XLog has been parsed");
	int numClasses = this.xEvtClasses.size();
	Collection<AbstractTaskClass> taskClasses = new ArrayList<AbstractTaskClass>(numClasses);
	for (int idx = 0; idx < numClasses; idx++) {
		taskClasses.add(new XesTaskClass(this.xEvtClasses.getByIndex(idx)));
	}
	return taskClasses;
}
@Deprecated
public String getClassNameOf(XEvent xesNativeEvent) {
String classString = null;
if (this.eventClassificationType.equals(ClassificationType.NAME)) {
classString = ((XAttributeLiteral)(xesNativeEvent.getAttributes().get(XConceptExtension.KEY_NAME))).getValue();
} else {
if (this.xesNativeEventClassifier != null) {
classString = this.xesNativeEventClassifier.getClassIdentity(xesNativeEvent);
} else {
throw new IllegalStateException("Native event classifier not yet defined!");
}
}
return classString;
}
@Deprecated
public Collection<String> getClassNames() {
	/*
	 * Collects the distinct class identities of all events in the parsed log.
	 * Bug fix: the original 'else' branch dereferenced xesNativeEventClassifier
	 * right after establishing it was null, guaranteeing a NullPointerException.
	 * With no native classifier available we now fall back to the concept:name
	 * attribute when the classification type is name-based, and only fail for
	 * genuinely unsupported classification types.
	 * NOTE(review): the xLog field is never assigned inside this class — this
	 * deprecated method presumably relies on callers/subclasses setting it;
	 * confirm before relying on it.
	 */
	Collection<String> classes = new TreeSet<String>();
	if (this.xesNativeEventClassifier != null) {
		for (XTrace xTrace : xLog) {
			for (XEvent xEvent : xTrace) {
				classes.add(this.xesNativeEventClassifier.getClassIdentity(xEvent));
			}
		}
	} else if (this.eventClassificationType.equals(ClassificationType.NAME)) {
		// Name-based fallback: read the standard concept:name attribute directly.
		for (XTrace xTrace : xLog) {
			for (XEvent xEvent : xTrace) {
				classes.add(((XAttributeLiteral)(xEvent.getAttributes().get(XConceptExtension.KEY_NAME))).getValue());
			}
		}
	} else {
		throw new UnsupportedOperationException("To date, no other classification than log-native or name-based is supported");
	}
	return classes;
}
} | 4,254 | 36.324561 | 139 | java |
Janus | Janus-master/src/minerful/logparser/XesEventParser.java | package minerful.logparser;
import minerful.concept.Event;
import minerful.concept.AbstractTaskClass;
import org.deckfour.xes.model.XEvent;
public class XesEventParser implements LogEventParser {
private XesTraceParser xesTraceParser;
public final XEvent xesEvent;
public XesEventParser(XesTraceParser xesTraceParser, XEvent xesEvent) {
this.xesTraceParser = xesTraceParser;
this.xesEvent = xesEvent;
}
@Override
public Character evtIdentifier() {
AbstractTaskClass logEventClass = this.xesTraceParser.xesLogParser.xesEventClassifier.classify(xesEvent);
return this.xesTraceParser.xesLogParser.taChaEncoDeco.encode(logEventClass);
}
public String getValue(String identifier) {
return this.xesEvent.getAttributes().get(identifier).toString();
}
@Override
public Event getEvent() {
AbstractTaskClass logEventClass = this.xesTraceParser.xesLogParser.xesEventClassifier.classify(xesEvent);
return new Event(logEventClass);
}
} | 955 | 28.875 | 107 | java |
Janus | Janus-master/src/minerful/logparser/XesLogParser.java | package minerful.logparser;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.deckfour.xes.in.XMxmlGZIPParser;
import org.deckfour.xes.in.XMxmlParser;
import org.deckfour.xes.in.XParser;
import org.deckfour.xes.in.XesXmlGZIPParser;
import org.deckfour.xes.in.XesXmlParser;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskCharArchive;
import minerful.io.encdec.TaskCharEncoderDecoder;
public class XesLogParser extends AbstractLogParser implements LogParser {
protected XParser parser;
protected XesEventClassifier xesEventClassifier;
protected List<XLog> xLogs = null;
protected XesLogParser(TaskCharEncoderDecoder taChaEncoDeco,
TaskCharArchive taskCharArchive,
List<LogTraceParser> traceParsers,
Integer startingTrace,
Integer subLogLength,
XParser parser,
XesEventClassifier xesEventClassifier,
List<XLog> xLogs) {
super(taChaEncoDeco, taskCharArchive, traceParsers, startingTrace, subLogLength);
this.parser = parser;
this.xesEventClassifier = xesEventClassifier;
this.xLogs = xLogs;
}
private void init(
LogEventClassifier.ClassificationType evtClassType,
Integer startingTrace,
Integer subLogLength) {
this.traceParsers = new ArrayList<LogTraceParser>();
this.taChaEncoDeco = new TaskCharEncoderDecoder();
this.parser = new XesXmlParser();
this.xesEventClassifier = new XesEventClassifier(evtClassType);
super.init(startingTrace, subLogLength);
}
public XesLogParser(File xesFile,
LogEventClassifier.ClassificationType evtClassType) throws Exception {
this(xesFile, evtClassType, 0, 0, null);
}
public XesLogParser(File xesFile,
LogEventClassifier.ClassificationType evtClassType,
TaskCharArchive taskCharArchive) throws Exception {
this(xesFile, evtClassType, 0, 0, taskCharArchive);
}
public XesLogParser(
		File xesFile,
		LogEventClassifier.ClassificationType evtClassType,
		Integer startingTrace,
		Integer subLogLength,
		TaskCharArchive taskCharArchive) throws Exception {
	// Initialise base state, then probe the known log formats — plain XES,
	// gzipped XES, plain MXML, gzipped MXML — until one parser accepts the file.
	this.init(evtClassType, startingTrace, subLogLength);
	if (!this.parser.canParse(xesFile)) {
		// init() installed a plain XES parser; try the remaining formats in turn.
		XParser[] fallbackParsers = {
				new XesXmlGZIPParser(), new XMxmlParser(), new XMxmlGZIPParser() };
		this.parser = null;
		for (XParser candidate : fallbackParsers) {
			if (candidate.canParse(xesFile)) {
				this.parser = candidate;
				break;
			}
		}
		if (this.parser == null) {
			throw new IllegalArgumentException("Unparsable log file: " + xesFile.getAbsolutePath());
		}
	}
	super.archiveTaskChars(this.parseLog(xesFile), taskCharArchive);
	super.postInit();
}
public XesLogParser(XLog xLog,
LogEventClassifier.ClassificationType evtClassType) {
this(xLog, evtClassType, 0, 0);
}
public XesLogParser(
XLog xLog,
LogEventClassifier.ClassificationType evtClassType,
Integer startingTrace,
Integer subLogLength) {
this.init(evtClassType, startingTrace, subLogLength);
super.archiveTaskChars(this.parseLog(xLog), null);
super.postInit();
}
@Override
protected Collection<AbstractTaskClass> parseLog(File xesFile) throws Exception {
this.xLogs = parser.parse(xesFile);
for (XLog xLog : xLogs) {
this.parseLog(xLog);
}
return this.xesEventClassifier.getTaskClasses();
}
protected Collection<AbstractTaskClass> parseLog(XLog xLog) {
	// Register the log's own classifiers, then wrap each XES trace into a
	// trace parser bound to this log parser.
	this.xesEventClassifier.addXesClassifiers(xLog.getClassifiers(), xLog);
	for (XTrace trace : xLog) {
		this.traceParsers.add(new XesTraceParser(trace, this));
	}
	return this.xesEventClassifier.getTaskClasses();
}
@Override
public LogEventClassifier getEventClassifier() {
return this.xesEventClassifier;
}
public XLog getFirstXLog() {
return this.xLogs.get(0);
}
@Override
protected AbstractLogParser makeACopy(
TaskCharEncoderDecoder taChaEncoDeco,
TaskCharArchive taskCharArchive,
List<LogTraceParser> traceParsers,
Integer startingTrace,
Integer subLogLength) {
return new XesLogParser(taChaEncoDeco, taskCharArchive, traceParsers, startingTrace, subLogLength, parser, xesEventClassifier, xLogs);
}
/**
* Returns a XesLogParser reading the union of the input logs (parsers)
* BEWARE valid only if the TaskCharArchive and TaskCharEncoderDecoder of the inputs are equivalent!
* Otherwise, an exception is risen.
 *
* @param xlp1
* @param xlp2
* @return
*/
public static XesLogParser mergeParsersWithEquivalentTaskChars(XesLogParser xlp1, XesLogParser xlp2) {
	// Guard: merging is only meaningful when both parsers encode tasks identically.
	if (!xlp1.taChaEncoDeco.equals(xlp2.taChaEncoDeco)){
		throw new IllegalArgumentException("The tasks encoders of the input parsers are different");
	}
	TaskCharEncoderDecoder taChaEncoDeco = xlp1.taChaEncoDeco; // TODO merge the two input logs
	TaskCharArchive taskCharArchive = xlp1.taskCharArchive; // TODO merge the two input logs
	// Trace parsers of both logs are concatenated, first input first.
	List<LogTraceParser> traceParsers = new ArrayList<>();
	traceParsers.addAll(xlp1.traceParsers);
	traceParsers.addAll(xlp2.traceParsers);
	Integer startingTrace = 0;
	Integer subLogLength = xlp1.length() + xlp2.length();
	XParser parser = new XesXmlParser();
	// NOTE(review): only the classifier of the FIRST parser is kept — assumes
	// both logs use equivalent classifiers; confirm with callers.
	XesEventClassifier xesEventClassifier= xlp1.xesEventClassifier;
	List<XLog> xLogs=new ArrayList<>();
	// BEWARE: xlp1's first XLog is mutated IN PLACE by appending xlp2's traces.
	XLog mLog= xlp1.getFirstXLog();
	mLog.addAll(xlp2.getFirstXLog());
	xLogs.add(mLog);
	return new XesLogParser(
			taChaEncoDeco,
			taskCharArchive,
			traceParsers,
			startingTrace,
			subLogLength,
			parser,
			xesEventClassifier,
			xLogs);
}
} | 6,068 | 31.805405 | 136 | java |
Janus | Janus-master/src/minerful/logparser/XesTaskClass.java | package minerful.logparser;
import javax.xml.bind.annotation.XmlTransient;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskClass;
import org.deckfour.xes.classification.XEventClass;
public class XesTaskClass extends AbstractTaskClass implements TaskClass {
@XmlTransient
public XEventClass xEventClass;
protected XesTaskClass() {
super();
}
public XesTaskClass(XEventClass xEventClass) {
this.xEventClass = xEventClass;
super.setName(xEventClass.getId());
}
@Override
public int compareTo(TaskClass o) {
if (o instanceof XesTaskClass) {
return this.xEventClass.compareTo(((XesTaskClass) o).xEventClass);
}
else {
return super.compareTo(o);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((xEventClass == null) ? 0 : xEventClass.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
XesTaskClass other = (XesTaskClass) obj;
if (xEventClass == null) {
if (other.xEventClass != null)
return false;
} else if (!xEventClass.equals(other.xEventClass))
return false;
return true;
}
} | 1,283 | 21.137931 | 74 | java |
Janus | Janus-master/src/minerful/logparser/XesTraceParser.java | package minerful.logparser;
import java.util.Iterator;
import java.util.ListIterator;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XTrace;
public class XesTraceParser extends AbstractTraceParser implements LogTraceParser {
private XTrace xesTrace;
private XesEventParser xesEventParser;
protected XesLogParser xesLogParser;
private ListIterator<XEvent> traceIterator;
public XesTraceParser(XTrace xesTrace, XesLogParser xesLogParser) {
this.xesTrace = xesTrace;
this.xesLogParser = xesLogParser;
this.traceIterator = xesTrace.listIterator();
this.parsing = true;
}
@Override
public int length() {
return this.xesTrace.size();
}
@Override
public LogParser getLogParser() {
return this.xesLogParser;
}
@Override
public void init() {
switch (this.getSenseOfReading()) {
case BACKWARDS:
this.traceIterator = xesTrace.listIterator(this.length());
break;
case ONWARDS:
default:
this.traceIterator = xesTrace.listIterator();
break;
}
this.parsing = true;
}
@Override
public boolean isParsingOver() {
return (this.isParsing() &&
// For some unforeseeable reason, if this.traceIterator.previousIndex() == 0, this.traceIterator.hasPrevious() returns false, even though it is by all means WRONG. Is it a bug in Java 7.0?
(this.senseOfReading.equals(SenseOfReading.BACKWARDS) && this.traceIterator.previousIndex() < 0)
||
(this.senseOfReading.equals(SenseOfReading.ONWARDS) && !this.traceIterator.hasNext()));
}
@Override
public Character parseSubsequentAndEncode() {
Character encodedEvent = null;
if (stepToSubsequent()) {
encodedEvent = xesEventParser.evtIdentifier();
}
return encodedEvent;
}
@Override
public String encodeTrace() {
	// Encode each event of the trace as its single-character identifier,
	// concatenated in log order.
	StringBuilder encoded = new StringBuilder();
	for (XEvent xEvent : xesTrace) {
		encoded.append(new XesEventParser(this, xEvent).evtIdentifier());
	}
	return encoded.toString();
}
@Override
public String printStringTrace() {
	// Render the trace as "<taskA,taskB,...>" using the task classes of its
	// events; an empty trace renders as "<>".
	StringBuilder rendered = new StringBuilder();
	rendered.append('<');
	boolean first = true;
	for (XEvent xEvent : xesTrace) {
		if (!first) {
			rendered.append(',');
		}
		rendered.append(new XesEventParser(this, xEvent).getEvent().getTaskClass());
		first = false;
	}
	rendered.append('>');
	return rendered.toString();
}
@Override
public LogEventParser parseSubsequent() {
if (stepToSubsequent()) {
return xesEventParser;
}
return null;
}
@Override
public boolean stepToSubsequent() {
	// Advances the iterator one event in the current sense of reading and
	// wraps it into an event parser; once the trace is exhausted the event
	// parser is cleared and parsing is flagged finished. Returns whether
	// parsing is still in progress.
	if (!isParsingOver()) {
		switch(this.senseOfReading) {
		case ONWARDS:
			this.xesEventParser = new XesEventParser(this, this.traceIterator.next());
			break;
		case BACKWARDS:
			this.xesEventParser = new XesEventParser(this, this.traceIterator.previous());
			// Fix: the original fell through to 'default' here. That was
			// harmless today (default only breaks) but fragile against future
			// case insertions; an explicit break states the intent.
			break;
		default:
			break;
		}
	} else {
		this.xesEventParser = null;
		this.parsing = false;
	}
	return isParsing();
}
@Override
public String getName() {
return this.xesTrace.getAttributes().get("concept:name").toString();
}
} | 3,085 | 23.887097 | 188 | java |
Janus | Janus-master/src/minerful/logparser/utils/FromXesToTextFile.java | package minerful.logparser.utils;
import java.io.File;
import java.io.PrintWriter;
import java.util.Iterator;
import minerful.logparser.LogEventClassifier.ClassificationType;
import minerful.logparser.LogTraceParser;
import minerful.logparser.XesLogParser;
public class FromXesToTextFile {
public static void main(String[] args) throws Exception {
	// Usage: <xes-file-in> <string-file-out> <dictionary-file-out>
	// Converts a XES event log into (1) a one-trace-per-line encoded text file
	// and (2) a dictionary file mapping encoded characters back to task names.
	if (args.length < 3) {
		System.err.println("Usage: java " + FromXesToTextFile.class.getName() + " <xes-file-in> <string-file-out> <dictionary-file-out>");
		System.exit(1);
	}
	File xesFileIn = new File(args[0]);
	File textFileOut = new File(args[1]);
	File dicFileOut = new File(args[2]);
	XesLogParser logParser = new XesLogParser(xesFileIn, ClassificationType.LOG_SPECIFIED);
	Iterator<LogTraceParser> traParserIt = logParser.traceIterator();
	// Encode the event log and store it in args[1].
	// try-with-resources replaces the manual flush()/close() pair: close()
	// flushes, and the writer is released even if encoding throws
	// (the original leaked it on exception).
	try (PrintWriter priWri = new PrintWriter(textFileOut)) {
		while (traParserIt.hasNext()) {
			priWri.println(traParserIt.next().encodeTrace());
		}
	}
	// Print out the dictionary in args[2]
	try (PrintWriter dicWri = new PrintWriter(dicFileOut)) {
		dicWri.println(logParser.getEventEncoderDecoder());
	}
	System.exit(0);
}
}
| 1,248 | 27.386364 | 133 | java |
Janus | Janus-master/src/minerful/logparser/utils/RandomLogSampler.java | package minerful.logparser.utils;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.List;
import org.deckfour.xes.in.XesXmlGZIPParser;
import org.deckfour.xes.in.XesXmlParser;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.out.XesXmlGZIPSerializer;
public class RandomLogSampler {
public static void main(String[] args) throws Exception {
	// Usage: <xes-file-in> <number-of-traces> <xes-file-out>
	// Randomly removes traces from the input log until at most
	// <number-of-traces> remain, then writes the sample as gzipped XES.
	if (args.length < 3) {
		System.err.println("Usage: java " + RandomLogSampler.class.getName() + " <xes-file-in> <number-of-traces> <xes-file-out>");
		System.exit(1); // fix: the original fell through and crashed on args[0]
	}
	File xesFileIn = new File(args[0]);
	int numberOfTraces = Integer.valueOf(args[1]);
	File xesFileOut = new File(args[2]);
	XesXmlParser parser = new XesXmlParser();
	if (!parser.canParse(xesFileIn)) {
		parser = new XesXmlGZIPParser();
		if (!parser.canParse(xesFileIn)) {
			throw new IllegalArgumentException("Unparsable log file: " + xesFileIn.getAbsolutePath());
		}
	}
	// Merge every XLog found in the file into a single log.
	List<XLog> xLogs = parser.parse(xesFileIn);
	XLog xLog = xLogs.remove(0);
	while (xLogs.size() > 0) {
		xLog.addAll(xLogs.remove(0));
	}
	// Evict random traces until the requested sample size is reached.
	// (A no-effect 'Math.floor(...)' statement from the original was removed.)
	int remainingTraces = xLog.size();
	while (remainingTraces > numberOfTraces && remainingTraces > 0) {
		xLog.remove((int) Math.floor(Math.random() * remainingTraces));
		remainingTraces--;
	}
	// Fix: the original never closed the output stream; try-with-resources
	// releases it even if serialization throws.
	try (OutputStream outStream = new FileOutputStream(xesFileOut)) {
		new XesXmlGZIPSerializer().serialize(xLog, outStream);
	}
}
}
| 1,589 | 32.829787 | 126 | java |
Janus | Janus-master/src/minerful/logparser/utils/package-info.java | /**
*
*/
/**
* @author claudio
*
*/
package minerful.logparser.utils; | 75 | 8.5 | 33 | java |
Janus | Janus-master/src/minerful/miner/AbstractConstraintsMiner.java | package minerful.miner;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.miner.stats.GlobalStatsTable;
import org.apache.log4j.Logger;
public abstract class AbstractConstraintsMiner implements ConstraintsMiner {
protected static Logger logger;
protected GlobalStatsTable globalStats;
protected TaskCharArchive taskCharArchive;
protected Set<TaskChar> tasksToQueryFor;
protected Double supportThreshold = null;
protected Double confidenceThreshold = null;
protected Double interestFactorThreshold = null;
protected long computedConstraintsAboveThresholds = 0;
public AbstractConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
this.globalStats = globalStats;
this.taskCharArchive = taskCharArchive;
this.tasksToQueryFor = tasksToQueryFor;
if (logger == null)
logger = Logger.getLogger(AbstractConstraintsMiner.class.getCanonicalName());
}
@Override
public Double getSupportThreshold() {
return supportThreshold;
}
@Override
public void setSupportThreshold(Double supportThreshold) {
this.supportThreshold = supportThreshold;
}
@Override
public Double getConfidenceThreshold() {
return confidenceThreshold;
}
@Override
public void setConfidenceThreshold(Double confidenceThreshold) {
this.confidenceThreshold = confidenceThreshold;
}
@Override
public Double getInterestFactorThreshold() {
return interestFactorThreshold;
}
@Override
public void setInterestFactorThreshold(Double interestFactorThreshold) {
this.interestFactorThreshold = interestFactorThreshold;
}
@Override
public Set<TaskChar> getTasksToQueryFor() {
return tasksToQueryFor;
}
@Override
public ConstraintsBag discoverConstraints() {
return this.discoverConstraints(null);
}
static int computeHeuristicSizeForHashSets(int supposedCapacity) {
return supposedCapacity * 2;
}
static Set<Constraint> makeTemporarySet(int supposedCapacity) {
return new HashSet<Constraint>(computeHeuristicSizeForHashSets(supposedCapacity));
}
static Set<Constraint> makeTemporarySet() {
return new TreeSet<Constraint>();
}
static SortedSet<Constraint> makeNavigableSet(Set<Constraint> temporarySet) {
return new TreeSet<Constraint>(temporarySet);
}
static SortedSet<Constraint> makeNavigableSet() {
return new TreeSet<Constraint>();
}
@Override
public boolean hasSufficientSupport(Constraint c) {
return (this.supportThreshold == null ? true : c.hasSufficientSupport(this.supportThreshold));
}
@Override
public boolean hasSufficientConfidence(Constraint c) {
return (this.confidenceThreshold == null ? true : c.hasSufficientConfidence(this.confidenceThreshold));
}
@Override
public boolean hasSufficientInterestFactor(Constraint c) {
	// Null threshold means "no filtering", as in the sibling checks.
	// NOTE(review): this delegates to c.hasSufficientSupport(...) with the
	// interest-factor threshold, unlike hasSufficientSupport/-Confidence which
	// call the matching check — looks like a copy-paste slip; confirm whether
	// Constraint exposes a hasSufficientInterestFactor check to call instead.
	return (this.interestFactorThreshold == null ? true : c.hasSufficientSupport(this.interestFactorThreshold));
}
@Override
public boolean hasValuesAboveThresholds(Constraint c) {
return this.hasSufficientSupport(c) && this.hasSufficientConfidence(c) && this.hasSufficientInterestFactor(c);
}
@Override
public long getComputedConstraintsAboveTresholds() {
return computedConstraintsAboveThresholds;
}
} | 3,531 | 28.932203 | 131 | java |
Janus | Janus-master/src/minerful/miner/ConstraintsMiner.java | package minerful.miner;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
public interface ConstraintsMiner {
ConstraintsBag discoverConstraints();
ConstraintsBag discoverConstraints(
ConstraintsBag constraintsBag);
long howManyPossibleConstraints();
long getComputedConstraintsAboveTresholds();
boolean hasValuesAboveThresholds(Constraint c);
boolean hasSufficientInterestFactor(Constraint c);
boolean hasSufficientConfidence(Constraint c);
boolean hasSufficientSupport(Constraint c);
void setInterestFactorThreshold(Double interestFactorThreshold);
Double getInterestFactorThreshold();
void setConfidenceThreshold(Double confidenceThreshold);
Double getConfidenceThreshold();
void setSupportThreshold(Double supportThreshold);
Double getSupportThreshold();
Set<TaskChar> getTasksToQueryFor();
}
| 936 | 21.853659 | 65 | java |
Janus | Janus-master/src/minerful/miner/DeterministicExistenceConstraintsMiner.java | package minerful.miner;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.existence.End;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.existence.AtMostOne;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
public class DeterministicExistenceConstraintsMiner extends ExistenceConstraintsMiner {
public DeterministicExistenceConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
super(globalStats, taskCharArchive, tasksToQueryFor);
}
@Override
protected Constraint discoverParticipationConstraint(TaskChar base, LocalStatsWrapper localStats, long testbedSize) {
	// NOTE(review): this returns Participation as soon as ANY recorded
	// repetition count is > 0, even when some traces miss the task entirely
	// (key 0 present) — whereas the deprecated guessLeastExistenceConstraint
	// below checks containsKey(0). Confirm which check "participation"
	// (task occurs in every trace) is meant to use here.
	for (int num: localStats.repetitions.keySet()) {
		if (num > 0)
			return new Participation(base);
	}
	return null;
}
@Deprecated
protected int guessLeastExistenceConstraint(LocalStatsWrapper localStats) {
// Did the character ever miss from the testbed case?
if (localStats.repetitions.containsKey(0))
return 0;
return 1;
// Very very rough: a little statistical analysis on the trend would be better
}
@Override
protected Constraint discoverAtMostOnceConstraint(TaskChar base, LocalStatsWrapper localStats, long testbedSize) {
	// NOTE(review): this declines AtMostOne whenever some trace has ZERO
	// occurrences (repetitions contains key 0), whereas the deprecated
	// guessMaximumExistenceConstraint checks for counts > 1 — confirm which
	// semantics "at most once" is meant to have here.
	if (localStats.repetitions.containsKey(0))
		return null;
	return new AtMostOne(base);
	// Very very rough: a little statistical analysis on the trend would work better
}
@Deprecated
protected int guessMaximumExistenceConstraint(LocalStatsWrapper localStats) {
for (int num: localStats.repetitions.keySet()) {
if (num > 1)
return Integer.MAX_VALUE;
}
return 1;
// Very very rough: a little statistical analysis on the trend would be better
}
@Override
protected Constraint discoverInitConstraint(TaskChar base, LocalStatsWrapper localStats, long testbedSize) {
Constraint init = null;
if (!(localStats.repetitions.containsKey(0) && localStats.repetitions.get(0) > 0)) {
if (localStats.getAppearancesAsFirst() >= this.globalStats.logSize) {
return new Init(base);
}
}
return init;
}
@Override
protected Constraint discoverEndConstraint(TaskChar base, LocalStatsWrapper localStats, long testbedSize) {
Constraint end = null;
if (!(localStats.repetitions.containsKey(0) && localStats.repetitions.get(0) > 0)) {
if (localStats.getAppearancesAsLast() >= this.globalStats.logSize) {
return new End(base);
}
}
return end;
}
} | 2,765 | 34.461538 | 142 | java |
Janus | Janus-master/src/minerful/miner/DeterministicRelationConstraintsMiner.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.NotChainSuccession;
import minerful.concept.constraint.relation.NotCoExistence;
import minerful.concept.constraint.relation.NotSuccession;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
import minerful.miner.stats.StatsCell;
@Deprecated
public class DeterministicRelationConstraintsMiner extends RelationConstraintsMiner {
public DeterministicRelationConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
super(globalStats, taskCharArchive, tasksToQueryFor);
}
@Override
public ConstraintsBag discoverConstraints(ConstraintsBag constraintsBag) {
/* Inizialization */
if (constraintsBag == null)
constraintsBag = new ConstraintsBag(tasksToQueryFor);
LocalStatsWrapper auxLocalStats = null;
Set<Constraint> auxRelCons = super.makeTemporarySet(
MetaConstraintUtils.howManyDiscoverableConstraints(tasksToQueryFor.size(), this.taskCharArchive.size()));
for (TaskChar tCh: tasksToQueryFor) {
auxLocalStats = this.globalStats.statsTable.get(tCh.identifier);
// Avoid the famous rule: EX FALSO QUOD LIBET! Meaning: if you have no occurrence of a character, each constraint is potentially valid on it. Thus, it is perfectly useless to indagate over it -- and believe me, if you remove this check, it actually happens you have every possible restrictive constraint as valid in the list!
if (auxLocalStats.getTotalAmountOfOccurrences() > 0) {
auxRelCons.addAll(
this.discoverRelationConstraints(tCh, constraintsBag));
}
}
auxRelCons = this.refineRelationConstraints(auxRelCons);
for (Constraint relCon: auxRelCons)
constraintsBag.add(relCon.getBase(), relCon);
return constraintsBag;
}
// Very very rough: a little statistical analysis on the trend would be better
@Override
protected Set<Constraint> discoverRelationConstraints(TaskChar taskChUnderAnalysis, ConstraintsBag constraintsBag) {
LocalStatsWrapper localStats = globalStats.statsTable.get(taskChUnderAnalysis);
// For each other character
StatsCell auxStatsCell = null;
boolean never = false, neverAfter = false, neverBefore = false,
alwaysOneStepAfter = false, alwaysOneStepBefore = false,
alwaysNeverAlternatingAfter = false, alwaysNeverAlternatingBefore = false,
alwaysNever = false, alwaysNeverAfter = false, alwaysNeverOneStepAfter = false;
Set<Constraint> relaCons = super.makeTemporarySet(
MetaConstraintUtils.howManyDiscoverableRelationConstraints(tasksToQueryFor.size(), this.taskCharArchive.size()));
for (TaskChar other: localStats.interplayStatsTable.keySet()) {
never = false;
neverAfter = false;
neverBefore = false;
alwaysOneStepAfter = false;
alwaysOneStepBefore = false;
alwaysNever = false;
alwaysNeverAfter = false;
alwaysNeverOneStepAfter = false;
if (!other.equals(taskChUnderAnalysis)) {
auxStatsCell = localStats.interplayStatsTable.get(other);
// Did it ever happen to the analyzed character NOT to appear WHENEVER the base character occurred?
never = (auxStatsCell.howManyTimesItNeverAppearedAtAll() > 0);
// If not, probably it's a RespondedExistence
if (!never) {
// If a RespondedExistence holds, is it a Response? To know this, you should check whether it NEVER happened to the analyzed character NOT to appear AFTER the base character occurred
neverAfter = (auxStatsCell.howManyTimesItNeverAppearedOnwards() > 0);
// If it is always true that AFTER the base character occurs, the analyzed one appears in the trace as well, then...
if (!neverAfter) {
// ... the AlternateResponse holds if and only if it NEVER happens that the base character appears in the middle of the subtrace between itself and the analyzed one AFTER
alwaysNeverAlternatingAfter = (
auxStatsCell.betweenOnwards == 0
);
if (alwaysNeverAlternatingAfter) {
// ... the ChainResponse holds if and only if the number of appearances of the analyzed character falling one step AFTER the base one is equal to or greater than the total amount of occurrences of the base character
alwaysOneStepAfter = (
auxStatsCell.distances.get(1) != null
&& localStats.getTotalAmountOfOccurrences() <=
auxStatsCell.distances.get(1));
}
}
// If a RespondedExistence holds, is it a Precedence? To know this, you should check whether it NEVER happened to the analyzed character NOT to appear BEFORE the base character occurred
neverBefore = (auxStatsCell.howManyTimesItNeverAppearedBackwards() > 0);
// If it is always true that BEFORE the base character occurs, the analyzed one appears in the trace as well, then...
if (!neverBefore) {
// ... the AlternateResponse holds if and only if it NEVER happens that the OTHER character appears in the middle of the subtrace between itself and the analyzed one BEFORE
alwaysNeverAlternatingBefore = (
auxStatsCell.betweenOnwards == 0
);
if (alwaysNeverAlternatingBefore) {
// ... the ChainPrecedence holds if and only if the number of appearances of the analyzed character falling one step BEFORE the base one is equal to or greater than the total amount of occurrences of the base character
alwaysOneStepBefore = (
auxStatsCell.distances.get(-1) != null
&& localStats.getTotalAmountOfOccurrences() <=
auxStatsCell.distances.get(-1));
}
}
}
// NotCoExistence(a, b)
alwaysNever = (auxStatsCell.howManyTimesItNeverAppearedAtAll() == localStats.getTotalAmountOfOccurrences());
if (!alwaysNever) {
// NotSuccession
alwaysNeverAfter = auxStatsCell.howManyTimesItNeverAppearedOnwards() == localStats.getTotalAmountOfOccurrences();
if (!alwaysNeverAfter) {
// NotChainSuccession
alwaysNeverOneStepAfter = auxStatsCell.distances.get(1) == null || auxStatsCell.distances.get(1) < 1;
}
}
if (!never) {
if (!neverAfter) {
if (alwaysNeverAlternatingAfter) {
if (alwaysOneStepAfter) {
relaCons.add(new ChainResponse(taskChUnderAnalysis, other));
}
else {
relaCons.add(new AlternateResponse(taskChUnderAnalysis, other));
}
}
else
relaCons.add(new Response(taskChUnderAnalysis, other));
}
if (!neverBefore) {
if (alwaysNeverAlternatingBefore) {
if (alwaysOneStepBefore) {
relaCons.add(new ChainPrecedence(other, taskChUnderAnalysis));
}
else {
relaCons.add(new AlternatePrecedence(other, taskChUnderAnalysis));
}
}
else
relaCons.add(new Precedence(other, taskChUnderAnalysis));
}
if (neverAfter && neverBefore) {
relaCons.add(new RespondedExistence(taskChUnderAnalysis, other));
}
}
if (alwaysNever) {
relaCons.add(new NotCoExistence(taskChUnderAnalysis, other));
} else {
if (alwaysNeverAfter) {
relaCons.add(new NotSuccession(taskChUnderAnalysis, other));
} else {
if (alwaysNeverOneStepAfter) {
relaCons.add(new NotChainSuccession(taskChUnderAnalysis, other));
}
}
}
}
}
return relaCons;
}
@Override
protected Set<Constraint> refineRelationConstraints(Set<Constraint> setOfConstraints) {
Set<Constraint> auxSet = super.makeTemporarySet(
MetaConstraintUtils.howManyDiscoverableConstraints(tasksToQueryFor.size(), this.taskCharArchive.size()));
RelationConstraint auxConstraint = null, testConstraint = null;
RelationConstraint[] refinedConstraints = null;
for (Constraint c: auxSet) {
auxConstraint = (RelationConstraint)c;
// ChainSuccession(a, b) == ChainResponse(a, b) && ChainPrecedence(a, b)
if (auxConstraint instanceof ChainPrecedence) {
testConstraint = new ChainResponse(auxConstraint.getBase(), auxConstraint.getImplied());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new ChainSuccession(
auxConstraint.getBase(), auxConstraint.getImplied())
};
}
} else if (auxConstraint instanceof ChainResponse) {
testConstraint = new ChainPrecedence(auxConstraint.getBase(), auxConstraint.getImplied());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new ChainSuccession(
auxConstraint.getBase(), auxConstraint.getImplied())
};
}
}
// AlternateSuccession(a, b) == AlternateResponse(a, b) && AlternatePrecedence(a, b)
else if (auxConstraint instanceof AlternatePrecedence) {
testConstraint = new AlternateResponse(auxConstraint.getBase(), auxConstraint.getImplied());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new AlternateSuccession(
auxConstraint.getBase(), auxConstraint.getImplied())
};
}
} else if (auxConstraint instanceof AlternateResponse) {
testConstraint = new AlternatePrecedence(auxConstraint.getBase(), auxConstraint.getImplied());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new AlternateSuccession(
auxConstraint.getBase(), auxConstraint.getImplied())
};
}
}
// Succession(a, b) == Response(a, b) && Precedence(a, b)
else if (auxConstraint instanceof Precedence) {
testConstraint = new Response(auxConstraint.getBase(), auxConstraint.getImplied());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new Succession(
auxConstraint.getBase(), auxConstraint.getImplied())
};
}
} else if (auxConstraint instanceof Response) {
testConstraint = new Precedence(auxConstraint.getBase(), auxConstraint.getImplied());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new Succession(
auxConstraint.getBase(), auxConstraint.getImplied())
};
}
}
// CoExistence(a, b) == RespondedExistence(a, b) && RespondedExistence(b, a)
else if (auxConstraint instanceof RespondedExistence) {
testConstraint = new RespondedExistence(auxConstraint.getImplied(), auxConstraint.getBase());
if (setOfConstraints.contains(testConstraint)) {
refinedConstraints = new RelationConstraint[] {
new CoExistence(
auxConstraint.getBase(), auxConstraint.getImplied()),
new CoExistence(
auxConstraint.getImplied(), auxConstraint.getBase()),
};
}
}
if (refinedConstraints != null) {
setOfConstraints.remove(auxConstraint);
setOfConstraints.remove(testConstraint);
for (Constraint refinedConstraint: refinedConstraints)
setOfConstraints.add(refinedConstraint);
}
testConstraint = null;
refinedConstraints = null;
}
return setOfConstraints;
}
} | 15,142 | 56.143396 | 337 | java |
Janus | Janus-master/src/minerful/miner/ExistenceConstraintsMiner.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.ConstraintsBag;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
public abstract class ExistenceConstraintsMiner extends AbstractConstraintsMiner {
public ExistenceConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
super(globalStats, taskCharArchive, tasksToQueryFor);
}
@Override
public ConstraintsBag discoverConstraints(ConstraintsBag constraintsBag) {
if (constraintsBag == null)
constraintsBag = new ConstraintsBag(tasksToQueryFor);
for (TaskChar task: tasksToQueryFor) {
LocalStatsWrapper localStats = this.globalStats.statsTable.get(task);
TaskChar base = task;
Constraint uniqueness = this.discoverAtMostOnceConstraint(base, localStats, this.globalStats.logSize);
if (uniqueness != null)
constraintsBag.add(base, uniqueness);
Constraint participation = this.discoverParticipationConstraint(base, localStats, this.globalStats.logSize);
if (participation != null)
constraintsBag.add(base, participation);
Constraint init = this.discoverEndConstraint(base, localStats, this.globalStats.logSize);
if (init != null)
constraintsBag.add(base, init);
Constraint end = this.discoverInitConstraint(base, localStats, this.globalStats.logSize);
if (end != null)
constraintsBag.add(base, end);
}
return constraintsBag;
}
@Override
public long howManyPossibleConstraints() {
return MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_EXISTENCE_CONSTRAINT_TEMPLATES * tasksToQueryFor.size();
}
protected abstract Constraint discoverParticipationConstraint(TaskChar base,
LocalStatsWrapper localStats, long testbedSize);
protected abstract Constraint discoverAtMostOnceConstraint(TaskChar base,
LocalStatsWrapper localStats, long testbedSize);
protected abstract Constraint discoverInitConstraint(TaskChar base,
LocalStatsWrapper localStats, long testbedSize);
protected abstract Constraint discoverEndConstraint(TaskChar base,
LocalStatsWrapper localStats, long testbedSize);
} | 2,615 | 40.52381 | 132 | java |
Janus | Janus-master/src/minerful/miner/ProbabilisticExistenceConstraintsMiner.java | package minerful.miner;
import java.util.Collection;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.ConstraintFamily.RelationConstraintSubFamily;
import minerful.concept.constraint.existence.End;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.existence.AtMostOne;
import minerful.concept.constraint.relation.NotChainSuccession;
import minerful.concept.constraint.relation.NotSuccession;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
/**
 * Existence-constraint miner computing, for every task, the support of
 * Participation, AtMostOne, Init and End over the log, together with the
 * confidence and interest-factor relevance metrics.
 */
public class ProbabilisticExistenceConstraintsMiner extends ExistenceConstraintsMiner {
	public ProbabilisticExistenceConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
		super(globalStats, taskCharArchive, tasksToQueryFor);
	}

	/**
	 * Computes support, confidence and interest factor of every existence
	 * constraint of every task to query for, and stores them in the bag.
	 *
	 * @param constraintsBag bag to fill; a fresh one is created when {@code null}
	 * @return the bag, including the evaluated constraints
	 */
	@Override
	public ConstraintsBag discoverConstraints(ConstraintsBag constraintsBag) {
		if (constraintsBag == null) {
			constraintsBag = new ConstraintsBag(this.tasksToQueryFor);
		}
		LocalStatsWrapper localStats = null;
		double pivotParticipationFraction = 0.0;

		for (TaskChar pivot: tasksToQueryFor) {
			localStats = this.globalStats.statsTable.get(pivot);
			// Avoid the famous rule: EX FALSO QUOD LIBET! If a task never occurs,
			// every constraint would trivially hold on it, so skip it altogether.
			if (localStats.getTotalAmountOfOccurrences() > 0) {
				Constraint participation = this.discoverParticipationConstraint(pivot, localStats, this.globalStats.logSize);
				// The pivot's participation fraction weighs the relevance metrics of all its constraints
				pivotParticipationFraction = participation.getSupport();
				updateConstraint(constraintsBag, pivot, participation, participation.getSupport(), pivotParticipationFraction);

				Constraint atMostOne = this.discoverAtMostOnceConstraint(pivot, localStats, this.globalStats.logSize);
				updateConstraint(constraintsBag, pivot, atMostOne, atMostOne.getSupport(), pivotParticipationFraction);

				Constraint init = this.discoverInitConstraint(pivot, localStats, this.globalStats.logSize);
				updateConstraint(constraintsBag, pivot, init, init.getSupport(), pivotParticipationFraction);

				Constraint end = this.discoverEndConstraint(pivot, localStats, this.globalStats.logSize);
				updateConstraint(constraintsBag, pivot, end, end.getSupport(), pivotParticipationFraction);

				if (hasValuesAboveThresholds(participation)) this.computedConstraintsAboveThresholds++;
				if (hasValuesAboveThresholds(atMostOne)) this.computedConstraintsAboveThresholds++;
				if (hasValuesAboveThresholds(init)) this.computedConstraintsAboveThresholds++;
				if (hasValuesAboveThresholds(end)) this.computedConstraintsAboveThresholds++;
			}
		}
		return constraintsBag;
	}

	/**
	 * Retrieves (or adds) the given constraint in the bag, refreshes its
	 * support, marks it as evaluated and recomputes its relevance metrics.
	 */
	protected Constraint updateConstraint(ConstraintsBag constraintsBag,
			TaskChar indexingParam, Constraint searchedCon,
			double support, double pivotParticipationFraction) {
		Constraint con = constraintsBag.getOrAdd(indexingParam, searchedCon);
		con.setSupport(support);
		con.setEvaluatedOnLog(true);
		refineByComputingRelevanceMetrics(con, pivotParticipationFraction);
		return con;
	}

	/**
	 * Confidence scales the support by the pivot's participation fraction;
	 * the interest factor scales it by that fraction squared.
	 */
	public static Constraint refineByComputingRelevanceMetrics(Constraint con, double pivotParticipationFraction) {
		con.setConfidence(con.getSupport() * pivotParticipationFraction);
		con.setInterestFactor(con.getSupport() * pivotParticipationFraction * pivotParticipationFraction);
		return con;
	}

	/**
	 * Support of Participation(base): complement of the fraction of traces in
	 * which the task never occurs.
	 */
	@Override
	protected Constraint discoverParticipationConstraint(TaskChar base,
			LocalStatsWrapper localStats, long testbedSize) {
		long zeroAppearances = 0;
		if (localStats.repetitions.containsKey(0)) {
			zeroAppearances += localStats.repetitions.get(0);
		}
		double oppositeSupport =
				(double) zeroAppearances / (double) testbedSize;
		return new Participation(base, Constraint.complementSupport(oppositeSupport));
	}

	/**
	 * Support of AtMostOne(base): fraction of traces containing at most one
	 * occurrence of the task.
	 * Fix: traces with zero occurrences were only counted when at least one
	 * trace with exactly one occurrence existed (the zero-count lookup was
	 * nested inside the one-count branch); the two counts are now accumulated
	 * independently.
	 */
	@Override
	protected Constraint discoverAtMostOnceConstraint(TaskChar base,
			LocalStatsWrapper localStats, long testbedSize) {
		long appearancesAsUpToOne = 0;
		if (localStats.repetitions.containsKey(0)) {
			appearancesAsUpToOne += localStats.repetitions.get(0);
		}
		if (localStats.repetitions.containsKey(1)) {
			appearancesAsUpToOne += localStats.repetitions.get(1);
		}
		double support =
				(double) appearancesAsUpToOne / (double) testbedSize;
		return new AtMostOne(base, support);
	}

	/**
	 * Support of Init(base): fraction of traces opened by the task
	 * (certain constraint when it opens every trace).
	 */
	@Override
	protected Constraint discoverInitConstraint(TaskChar base,
			LocalStatsWrapper localStats, long testbedSize) {
		if (localStats.getAppearancesAsFirst() >= testbedSize) {
			return new Init(base);
		}
		return new Init(base, ((double) localStats.getAppearancesAsFirst() / (double) testbedSize));
	}

	/**
	 * Support of End(base): fraction of traces closed by the task
	 * (certain constraint when it closes every trace).
	 */
	@Override
	protected Constraint discoverEndConstraint(TaskChar base,
			LocalStatsWrapper localStats, long testbedSize) {
		if (localStats.getAppearancesAsLast() >= testbedSize) {
			return new End(base);
		}
		return new End(base, ((double) localStats.getAppearancesAsLast() / (double) testbedSize));
	}
}
| 6,146 | 45.218045 | 150 | java |
Janus | Janus-master/src/minerful/miner/ProbabilisticRelationBranchedConstraintsMiner.java | package minerful.miner;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSet;
import minerful.concept.TaskCharSetFactory;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.index.ConstraintIndexHasseInverseDepthFirstStepper;
import minerful.index.ConstraintIndexHasseMaker;
import minerful.index.ConstraintIndexHassePruner;
import minerful.miner.engine.ProbabilisticRelationInBranchedConstraintsMiningEngine;
import minerful.miner.engine.ProbabilisticRelationOutBranchedConstraintsMiningEngine;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
import minerful.miner.stats.LocalStatsWrapperForCharsets;
import minerful.miner.stats.charsets.TasksSetCounter;
import org.apache.commons.math3.util.ArithmeticUtils;
public class ProbabilisticRelationBranchedConstraintsMiner extends RelationConstraintsMiner {
// TODO To be made user-defined, not a constant within the code
// Whether the Alternate* templates are computed alongside the plain ones.
public static final boolean INCLUDE_ALTERNATION = true;
// Names of the constraint templates whose supports this miner computes.
public static final String[] COMPUTED_SUPPORTS =
	new String[] {
		RespondedExistence.class.getName(),
		Response.class.getName(), AlternateResponse.class.getName(), ChainResponse.class.getName(),
		Precedence.class.getName(), AlternatePrecedence.class.getName(), ChainPrecedence.class.getName()
};
// Sentinel value meaning "no cap on the number of branching tasks".
public static final int NO_LIMITS_IN_BRANCHING = Integer.MAX_VALUE;
// Maximum number of tasks a branched constraint may range over (defaults to unlimited).
protected int branchingLimit = NO_LIMITS_IN_BRANCHING;
// Factory used to build the task-set combinations under analysis.
protected TaskCharSetFactory taskCharSetFactory;
// Engine for in-branched constraints (used below for the Precedence family)...
protected ProbabilisticRelationInBranchedConstraintsMiningEngine inBraDisco;
// ...and engine for out-branched constraints (used for the Response family).
protected ProbabilisticRelationOutBranchedConstraintsMiningEngine ouBraDisco;
/**
 * Creates a miner with no explicit branching limit, wiring up the task-set
 * factory and the in-/out-branched mining engines over the given statistics.
 */
public ProbabilisticRelationBranchedConstraintsMiner(
		GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
	super(globalStats, taskCharArchive, tasksToQueryFor);
	this.taskCharSetFactory = new TaskCharSetFactory(taskCharArchive);
	this.inBraDisco = new ProbabilisticRelationInBranchedConstraintsMiningEngine(globalStats);
	this.ouBraDisco = new ProbabilisticRelationOutBranchedConstraintsMiningEngine(globalStats);
}
/**
 * Creates a miner capping branched constraints at {@code branchingLimit}
 * tasks. A limit equal to or exceeding the alphabet size is treated as
 * "no limit" (NOTE(review): such a limit is replaced by the unlimited
 * sentinel rather than clamped to the alphabet size — presumably equivalent
 * in effect; confirm against how branchingLimit is consumed downstream).
 */
public ProbabilisticRelationBranchedConstraintsMiner(
		GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor, int branchingLimit) {
	this(globalStats, taskCharArchive, tasksToQueryFor);
	this.branchingLimit =
			( (branchingLimit < this.taskCharArchive.size())
				? branchingLimit
				: NO_LIMITS_IN_BRANCHING
			);
}
/**
 * No refinement is implemented for branched constraints yet: the input set
 * is returned unchanged.
 */
@Override
protected Set<? extends Constraint> refineRelationConstraints(
		Set<Constraint> setOfConstraints) {
	// TODO Nothing to do, by now
	return setOfConstraints;
}
/**
 * Discovers branched relation constraints for every task under analysis and
 * files them into the bag, indexed by the implied task when the constraint is
 * activation-branched, by the base task otherwise.
 *
 * @param constraintsBag bag to fill; a fresh one is created when {@code null}
 * @return the bag, including the discovered constraints
 */
@Override
public ConstraintsBag discoverConstraints(ConstraintsBag constraintsBag) {
	if (constraintsBag == null) {
		constraintsBag = new ConstraintsBag(tasksToQueryFor);
	}
	Set<Constraint> harvest = super.makeTemporarySet();
	for (TaskChar pivot : this.tasksToQueryFor) {
		LocalStatsWrapper pivotStats = this.globalStats.statsTable.get(pivot);
		// Avoid the famous rule: EX FALSO QUOD LIBET! Tasks that never occur
		// would trivially satisfy every restrictive constraint, so skip them.
		if (pivotStats.getTotalAmountOfOccurrences() > 0) {
			logger.info("Evaluating constraints for: " + pivot + "... ");
			harvest.addAll(this.discoverRelationConstraints(pivot, constraintsBag));
			logger.info("Done.");
		}
	}
	for (Constraint harvested : harvest) {
		// Only relation constraints are ever harvested here
		RelationConstraint relCon = (RelationConstraint) harvested;
		TaskChar index = relCon.isActivationBranched() ? relCon.getImplied() : relCon.getBase();
		constraintsBag.add(index, relCon);
	}
	return constraintsBag;
}
@Override
protected Set<Constraint> discoverRelationConstraints(TaskChar taskChUnderAnalysis, ConstraintsBag constraintsBag) {
ConstraintIndexHasseMaker
hasseOutMaker = new ConstraintIndexHasseMaker(this.taskCharArchive, this.branchingLimit, taskChUnderAnalysis),
hasseInMaker = new ConstraintIndexHasseMaker(this.taskCharArchive, this.branchingLimit, taskChUnderAnalysis);
ConstraintIndexHasseInverseDepthFirstStepper stepper = new ConstraintIndexHasseInverseDepthFirstStepper(hasseOutMaker.hasseDiagram);
Set<Constraint> discoveredConstraints = new TreeSet<Constraint>();
if (!globalStats.isForBranchedConstraints())
return discoveredConstraints;
LocalStatsWrapper tChUnderAnalysisLocalStats = globalStats.statsTable.get(taskChUnderAnalysis);
// Avoid the famous rule: EX FALSO QUOD LIBET! Meaning: if you have no occurrence of a character, each constraint is potentially valid on it. Thus, it is perfectly useless to indagate over it -- and believe me, if you remove this check, it actually happens you have every possible restrictive constraint as valid in the list!
long tChUnderAnalysisOccurrences = tChUnderAnalysisLocalStats.getTotalAmountOfOccurrences();
if (tChUnderAnalysisOccurrences <= 0)
return discoveredConstraints;
/*
SortedSet<TaskCharSet> combosToAnalyze =
taskCharSetFactory.createAllMultiCharCombosExcludingOneTaskChar(taskChUnderAnalysis, this.branchingLimit);
*/
Map<String, Boolean>
interruptedCalculation = new HashMap<String, Boolean>();
for (String constraintTemplate : COMPUTED_SUPPORTS) {
interruptedCalculation.put(constraintTemplate, false);
}
RespondedExistence
nuOBRespondedExistence = null/*,
nuIBRespondedExistence = null*/;
Response
nuOBResponse = null/*,
nuIBResponse = null*/;
/**/
AlternateResponse
nuOBAlternateResponse = null/*,
nuIBAlternateResponse = null*/;
ChainResponse
nuOBChainResponse = null/*,
nuIBChainResponse = null*/;
Precedence
/*nuOBPrecedence = null,*/
nuIBPrecedence = null;
/**/
AlternatePrecedence
nuIBAlternatePrecedence = null/*,
/*nuOBAlternatePrecedence = null*/;
ChainPrecedence
/*nuOBChainPrecedence = null,*/
nuIBChainPrecedence = null;
/*
CoExistence
nuOBCoExistence = null,
nuIBCoExistence = null;
Succession
nuOBSuccession = null/*,
nuIBSuccession = null;
AlternateSuccession
nuOBAlternateSuccession = null,
nuIBAlternateSuccession = null;
ChainSuccession
nuOBChainSuccession = null,
nuIBChainSuccession = null;
NotCoExistence
nuOBNotCoExistence = null,
nuIBNotCoExistence = null;
NotSuccession
nuOBNotSuccession = null,
nuIBNotSuccession = null;
NotChainSuccession
nuOBNotChainSuccession = null,
nuIBNotChainSuccession = null;
*/
TaskCharSet comboToAnalyze = null;
while (stepper.isThereAnyNodeLeftToAnalyse()) {
comboToAnalyze = stepper.getCurrentTaskCharSet();
/******* Out-branched */
nuOBRespondedExistence = this.ouBraDisco
.discoverBranchedRespondedExistenceConstraints(
taskChUnderAnalysis, tChUnderAnalysisLocalStats, tChUnderAnalysisOccurrences,
comboToAnalyze);
nuOBResponse = this.ouBraDisco
.discoverBranchedResponseConstraints(
taskChUnderAnalysis, tChUnderAnalysisLocalStats, tChUnderAnalysisOccurrences,
comboToAnalyze);
if (INCLUDE_ALTERNATION) {
/**/
nuOBAlternateResponse = this.ouBraDisco
.discoverBranchedAlternateResponseConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats, tChUnderAnalysisOccurrences,
comboToAnalyze);
/**/
}
nuOBChainResponse = this.ouBraDisco
.discoverBranchedChainResponseConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats, tChUnderAnalysisOccurrences,
comboToAnalyze);
/* nuOBPrecedence = this.ouBraDisco
.discoverBranchedPrecedenceConstraints(
taskChUnderAnalysis,
comboToAnalyze);
nuOBAlternatePrecedence = this.ouBraDisco
.discoverBranchedAlternatePrecedenceConstraints(
taskChUnderAnalysis,
comboToAnalyze);
nuOBChainPrecedence = this.ouBraDisco
.discoverBranchedChainPrecedenceConstraints(
taskChUnderAnalysis,
comboToAnalyze);
nuOBCoExistence = this.ouBraDisco
.discoverBranchedCoExistenceConstraints(
taskChUnderAnalysis, tChUnderAnalysisAppearances,
comboToAnalyze);
nuOBSuccession = this.ouBraDisco
.discoverBranchedSuccessionConstraints(
taskChUnderAnalysis, tChUnderAnalysisAppearances,
comboToAnalyze);
nuOBAlternateSuccession = this.ouBraDisco
.discoverBranchedAlternateSuccessionConstraints(
taskChUnderAnalysis, tChUnderAnalysisAppearances,
comboToAnalyze);
nuOBChainSuccession = this.ouBraDisco
.discoverBranchedChainSuccessionConstraints(
taskChUnderAnalysis, tChUnderAnalysisAppearances,
comboToAnalyze);
*/
hasseOutMaker.addConstraint(comboToAnalyze, nuOBRespondedExistence);
hasseOutMaker.addConstraint(comboToAnalyze, nuOBResponse);
/**/
if (INCLUDE_ALTERNATION) {
hasseOutMaker.addConstraint(comboToAnalyze, nuOBAlternateResponse);
}
/**/
hasseOutMaker.addConstraint(comboToAnalyze, nuOBChainResponse);
if (hasValuesAboveThresholds(nuOBRespondedExistence)) this.computedConstraintsAboveThresholds++;
if (hasValuesAboveThresholds(nuOBResponse)) this.computedConstraintsAboveThresholds++;
/**/
if (INCLUDE_ALTERNATION) {
if (hasValuesAboveThresholds(nuOBAlternateResponse)) this.computedConstraintsAboveThresholds++;
}
/**/
if (hasValuesAboveThresholds(nuOBChainResponse)) this.computedConstraintsAboveThresholds++;
/*
hasseMaker.addConstraint(comboToAnalyze, nuOBPrecedence);
hasseMaker.addConstraint(comboToAnalyze, nuOBAlternatePrecedence);
hasseMaker.addConstraint(comboToAnalyze, nuOBChainPrecedence);
hasseMaker.addConstraint(comboToAnalyze, nuOBCoExistence);
hasseMaker.addConstraint(comboToAnalyze, nuOBSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuOBAlternateSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuOBChainSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuOBNotCoExistence);
hasseMaker.addConstraint(comboToAnalyze, nuOBNotSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuOBNotChainSuccession);
*/
/******* In-branched */
/* nuIBRespondedExistence = this.inBraDisco.discoverBranchedRespondedExistenceConstraints(
taskChUnderAnalysis,
comboToAnalyze);
nuIBResponse = this.inBraDisco
.discoverBranchedResponseConstraints(
taskChUnderAnalysis,
comboToAnalyze);
nuIBAlternateResponse = this.inBraDisco
.discoverBranchedAlternateResponseConstraints(
taskChUnderAnalysis,
comboToAnalyze);
nuIBChainResponse = this.inBraDisco
.discoverBranchedChainResponseConstraints(
taskChUnderAnalysis,
comboToAnalyze);
*/
nuIBPrecedence = this.inBraDisco
.discoverBranchedPrecedenceConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisOccurrences,
comboToAnalyze);
/**/
if (INCLUDE_ALTERNATION) {
nuIBAlternatePrecedence = this.inBraDisco
.discoverBranchedAlternatePrecedenceConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisOccurrences,
comboToAnalyze);
}
/**/
nuIBChainPrecedence = this.inBraDisco
.discoverBranchedChainPrecedenceConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisOccurrences,
comboToAnalyze);
/*
nuIBCoExistence = this.inBraDisco
.discoverBranchedCoExistenceConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisAppearances,
comboToAnalyze);
nuIBSuccession = this.inBraDisco
.discoverBranchedSuccessionConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisAppearances,
comboToAnalyze);
nuIBAlternateSuccession = this.inBraDisco
.discoverBranchedAlternateSuccessionConstraints(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisAppearances,
comboToAnalyze);
nuIBChainSuccession = this.inBraDisco
.discoverBranchedChainSuccessionConstraint(
taskChUnderAnalysis,
tChUnderAnalysisLocalStats,
tChUnderAnalysisAppearances,
comboToAnalyze);
nuIBNotCoExistence = new NotCoExistence(
nuIBCoExistence.getBase(), nuIBCoExistence.getImplied(),
Constraint.complementSupport(nuIBCoExistence.support));
nuIBNotSuccession = new NotSuccession(
nuIBSuccession.getBase(), nuIBSuccession.getImplied(),
Constraint.complementSupport(nuIBSuccession.support));
nuIBNotChainSuccession = new NotChainSuccession(
nuIBChainSuccession.getBase(), nuIBChainSuccession.getImplied(),
Constraint.complementSupport(nuIBChainSuccession.support));
*//*
hasseMaker.addConstraint(comboToAnalyze, nuIBResponse);
hasseMaker.addConstraint(comboToAnalyze, nuIBAlternateResponse);
hasseMaker.addConstraint(comboToAnalyze, nuIBChainResponse);
*/
hasseInMaker.addConstraint(comboToAnalyze, nuIBPrecedence);
/**/
if (INCLUDE_ALTERNATION) {
hasseInMaker.addConstraint(comboToAnalyze, nuIBAlternatePrecedence);
}
/**/
hasseInMaker.addConstraint(comboToAnalyze, nuIBChainPrecedence);
if (hasValuesAboveThresholds(nuIBPrecedence)) this.computedConstraintsAboveThresholds++;
/**/
if (INCLUDE_ALTERNATION) {
if (hasValuesAboveThresholds(nuIBAlternatePrecedence)) this.computedConstraintsAboveThresholds++;
}
/**/
if (hasValuesAboveThresholds(nuIBChainPrecedence)) this.computedConstraintsAboveThresholds++;
/*
hasseMaker.addConstraint(comboToAnalyze, nuIBCoExistence);
hasseMaker.addConstraint(comboToAnalyze, nuIBSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuIBAlternateSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuIBChainSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuIBNotCoExistence);
hasseMaker.addConstraint(comboToAnalyze, nuIBNotSuccession);
hasseMaker.addConstraint(comboToAnalyze, nuIBNotChainSuccession);
*/
/******* Hierarchy and subsumption linking */
nuOBResponse.setConstraintWhichThisIsBasedUpon(nuOBRespondedExistence);
if (INCLUDE_ALTERNATION) {
nuOBAlternateResponse.setConstraintWhichThisIsBasedUpon(nuOBResponse);
nuOBChainResponse.setConstraintWhichThisIsBasedUpon(nuOBAlternateResponse);
}
else {
nuOBChainResponse.setConstraintWhichThisIsBasedUpon(nuOBResponse);
}
// Mind the inversion in roles: nuIBPrecedence -> nuOBRespondedExistence !!
nuIBPrecedence.setConstraintWhichThisIsBasedUpon(nuOBRespondedExistence);
if (INCLUDE_ALTERNATION) {
nuIBAlternatePrecedence.setConstraintWhichThisIsBasedUpon(nuIBPrecedence);
nuIBChainPrecedence.setConstraintWhichThisIsBasedUpon(nuIBAlternatePrecedence);
} else {
nuIBChainPrecedence.setConstraintWhichThisIsBasedUpon(nuIBPrecedence);
}
/*
nuOBPrecedence.setConstraintWhichThisIsBasedUpon(nuIBRespondedExistence);
nuOBAlternatePrecedence.setConstraintWhichThisIsBasedUpon(nuOBPrecedence);
nuOBChainPrecedence.setConstraintWhichThisIsBasedUpon(nuOBAlternatePrecedence);
nuOBCoExistence.setImplyingConstraints(nuOBRespondedExistence, nuIBRespondedExistence);
nuOBSuccession.setConstraintWhichThisIsBasedUpon(nuOBCoExistence);
nuOBSuccession.setImplyingConstraints(nuOBResponse, nuOBPrecedence);
nuOBAlternateSuccession.setImplyingConstraints(nuOBAlternateResponse, nuOBAlternatePrecedence);
nuOBChainSuccession.setImplyingConstraints(nuOBChainResponse, nuOBChainPrecedence);
nuOBNotCoExistence.setOpposedTo(nuOBCoExistence);
nuOBNotCoExistence.setConstraintWhichThisIsBasedUpon(nuOBNotSuccession);
nuOBNotSuccession.setOpposedTo(nuOBSuccession);
nuOBNotSuccession.setConstraintWhichThisIsBasedUpon(nuOBNotChainSuccession);
nuOBNotChainSuccession.setOpposedTo(nuOBChainSuccession);
*/
stepper.moveOneStepAhead();
}
ConstraintIndexHassePruner outPruner = new ConstraintIndexHassePruner(true, hasseOutMaker.hasseDiagram);
outPruner.prune();
ConstraintIndexHassePruner inPruner = new ConstraintIndexHassePruner(false, hasseInMaker.hasseDiagram);
inPruner.prune();
discoveredConstraints.addAll(outPruner.nonRedundantConstraints());
discoveredConstraints.addAll(inPruner.nonRedundantConstraints());
double participationFraction = super.computeParticipationFraction(taskChUnderAnalysis, tChUnderAnalysisLocalStats, globalStats.logSize);
discoveredConstraints = refineByComputingConfidenceLevel(discoveredConstraints, participationFraction);
return discoveredConstraints;
}
/**
 * Sets the confidence of every discovered constraint to its support scaled by
 * the participation fraction of the task under analysis.
 *
 * @param discoveredConstraints constraints to refine (updated in place)
 * @param participationFraction fraction of traces in which the task occurs
 * @return the same set, with confidences updated
 */
public Set<Constraint> refineByComputingConfidenceLevel(Set<Constraint> discoveredConstraints, double participationFraction) {
    discoveredConstraints.forEach(
            constraint -> constraint.setConfidence(constraint.getSupport() * participationFraction));
    return discoveredConstraints;
}
/**
 * Tells whether the given pivot statistics can feed branched-constraint
 * analysis: only {@code LocalStatsWrapperForCharsets} instances qualify.
 */
protected boolean areLocalStatsOkForBranchedConstraintsAnalysis(LocalStatsWrapper pivotLocalStats) {
return (pivotLocalStats instanceof LocalStatsWrapperForCharsets);
}
/**
 * Computes the remainder of the alphabet: all tasks except those already
 * counted by the given task-set counter and except the explicitly excluded one.
 *
 * @param alphabet the full set of tasks
 * @param charSetCounter counter whose task set is to be subtracted
 * @param taskToExclude single task to leave out (typically the pivot)
 * @return a sorted set of the remaining tasks
 */
protected Collection<TaskChar> getTheRestOfTheAlphabet(Collection<TaskChar> alphabet, TasksSetCounter charSetCounter,
TaskChar taskToExclude) {
    TreeSet<TaskChar> remainder = new TreeSet<TaskChar>(alphabet);
    remainder.remove(taskToExclude);
    remainder.removeAll(charSetCounter.getTaskCharSet());
    return remainder;
}
/**
 * Tells whether the given branching limit actually restricts branching.
 * NOTE(review): this reads as "limit strictly below the no-limits sentinel",
 * which assumes {@code NO_LIMITS_IN_BRANCHING} is the highest admissible
 * value — confirm against its declaration.
 */
public static boolean isBranchingLimited(int branchingLimit) {
return branchingLimit < NO_LIMITS_IN_BRANCHING;
}
/**
 * Counts the relation constraints this branched miner could possibly generate:
 * (number of discoverable forward templates - 1 + backward templates - 1)
 * × tasks to query for × the number of activity combinations per activity,
 * where the latter is the sum of C(alphabetSize, i) for i up to the effective
 * branching limit (capped at alphabetSize - 1).
 */
@Override
public long howManyPossibleConstraints() {
    int alphabetSize = this.taskCharArchive.size();
    // A branch can never span the whole alphabet, so cap the limit accordingly.
    int effectiveBranchingLimit =
            (this.branchingLimit < alphabetSize
                    ? this.branchingLimit
                    : alphabetSize - 1);
    long combosPerActivity = 0L;
    for (int branchSize = 1; branchSize <= effectiveBranchingLimit; branchSize++) {
        combosPerActivity +=
                ArithmeticUtils
                .binomialCoefficient(
                        alphabetSize, // n
                        branchSize);  // k
    }
    int templatesCount =
            MetaConstraintUtils.getAllDiscoverableForwardRelationConstraintTemplates().size() - 1   // out-branching
            + MetaConstraintUtils.getAllDiscoverableBackwardRelationConstraintTemplates().size() - 1; // in-branching
    return templatesCount * tasksToQueryFor.size() * combosPerActivity;
}
} | 19,541 | 40.227848 | 337 | java |
Janus | Janus-master/src/minerful/miner/ProbabilisticRelationConstraintsMiner.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner;
import java.util.Iterator;
import java.util.NavigableMap;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily.ConstraintImplicationVerse;
import minerful.concept.constraint.ConstraintFamily.RelationConstraintSubFamily;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.NotChainSuccession;
import minerful.concept.constraint.relation.NotCoExistence;
import minerful.concept.constraint.relation.NotSuccession;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
import minerful.miner.stats.StatsCell;
import org.apache.commons.math3.distribution.TDistribution;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
public class ProbabilisticRelationConstraintsMiner extends RelationConstraintsMiner {
private final boolean foreseeingDistances;
/**
 * Builds a relation-constraints miner that does not estimate expected
 * event distances ({@code foreseeingDistances} is set to {@code false}).
 */
public ProbabilisticRelationConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
super(globalStats, taskCharArchive, tasksToQueryFor);
this.foreseeingDistances = false;
}
/**
 * Builds a relation-constraints miner, optionally estimating expected
 * distances between correlated events.
 *
 * @param foreseeingDistances when {@code true}, distance statistics are
 *        computed for discovered responded-existence constraints
 */
public ProbabilisticRelationConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor, boolean foreseeingDistances) {
super(globalStats, taskCharArchive, tasksToQueryFor);
this.foreseeingDistances = foreseeingDistances;
}
/**
 * Discovers relation constraints for every task to query for, collecting them
 * in the given bag (a fresh bag is created when {@code null} is passed).
 *
 * @param constraintsBag bag to fill, or {@code null}
 * @return the (possibly newly created) bag with the discovered constraints
 */
@Override
public ConstraintsBag discoverConstraints(ConstraintsBag constraintsBag) {
    ConstraintsBag bag =
            (constraintsBag == null) ? new ConstraintsBag(tasksToQueryFor) : constraintsBag;
    for (TaskChar pivot : tasksToQueryFor) {
        LocalStatsWrapper pivotStats = this.globalStats.statsTable.get(pivot);
        // Avoid the famous rule: EX FALSO QUOD LIBET! If a task never occurs,
        // every restrictive constraint would trivially hold on it, so analysing
        // it would only produce noise.
        if (pivotStats.getTotalAmountOfOccurrences() > 0) {
            this.discoverRelationConstraints(pivot, bag);
        }
    }
    return bag;
}
// Very very rough: a little statistical analysis on the trend would be better
/**
 * Discovers all relation constraints having {@code pivotTask} as the indexing
 * task. For every other task appearing in the pivot's interplay statistics, the
 * supports of the fourteen relation templates (responded existence, response,
 * precedence, their alternate/chain variants, coupling and negative variants)
 * are computed from the interplay cells and recorded in the bag via
 * {@code updateConstraint}. When distance foreseeing is enabled, each
 * constraint is further refined with expected-distance statistics.
 *
 * @param pivotTask the task under analysis
 * @param constraintsBag bag in which discovered constraints are registered
 * @return the set of constraints discovered for the pivot
 */
@Override
public Set<? extends Constraint> discoverRelationConstraints(TaskChar pivotTask, ConstraintsBag constraintsBag) {
double supportForRespondedExistence = 0.0,
supportForResponse = 0.0,
supportForAlternateResponse = 0.0,
supportForChainResponse = 0.0,
supportForPrecedence = 0.0,
supportForAlternatePrecedence = 0.0,
supportForChainPrecedence = 0.0,
supportForCoExistence = 0.0,
supportForSuccession = 0.0,
supportForAlternateSuccession = 0.0,
supportForChainSuccession = 0.0,
supportForNotCoExistence = 0.0,
supportForNotSuccession = 0.0,
supportForNotChainSuccession = 0.0,
pivotParticipationFraction = 0.0,
searchedParticipationFraction = 0.0;
StatsCell interplayStats = null,
reversedInterplayStats = null;
Set<Constraint>
relaCons = //super.makeTemporarySet(
// MetaConstraintUtils.howManyPossibleRelationConstraints(this.tasksToQueryFor.size(), this.taskCharArchive.size())),
super.makeNavigableSet(),
nuRelaCons = super.makeNavigableSet();
LocalStatsWrapper
pivotLocalStats = globalStats.statsTable.get(pivotTask),
searchedLocalStats = null;
long pivotAppearances = pivotLocalStats.getTotalAmountOfOccurrences(),
searchedAppearances = 0L;
// For each other character
for (TaskChar searchedTask : pivotLocalStats.interplayStatsTable.keySet()) {
nuRelaCons = super.makeNavigableSet();
// NOTE(review): the pivot's participation fraction is loop-invariant, yet it
// is recomputed on every iteration — harmless, but could be hoisted.
pivotParticipationFraction = this.computeParticipationFraction(pivotTask, pivotLocalStats, globalStats.logSize);
if (!searchedTask.equals(pivotTask)) {
searchedLocalStats = globalStats.statsTable.get(searchedTask);
// Interplay cells in both directions: pivot->searched and searched->pivot.
interplayStats = pivotLocalStats.interplayStatsTable.get(searchedTask);
reversedInterplayStats = searchedLocalStats.interplayStatsTable.get(pivotTask);
searchedAppearances = searchedLocalStats.getTotalAmountOfOccurrences();
searchedParticipationFraction = this.computeParticipationFraction(searchedTask, searchedLocalStats, globalStats.logSize);
// Forward- and backward-looking templates are normalised by the pivot's
// occurrences; coupling and negative templates by the sum of both tasks'.
supportForRespondedExistence =
computeSupportForRespondedExistence(interplayStats, pivotAppearances);
supportForResponse =
computeSupportForResponse(interplayStats, pivotAppearances);
supportForAlternateResponse =
computeSupportForAlternateResponse(interplayStats, pivotAppearances);
supportForChainResponse =
computeSupportForChainResponse(interplayStats, pivotAppearances);
supportForPrecedence =
computeSupportForPrecedence(interplayStats, pivotAppearances);
supportForAlternatePrecedence =
computeSupportForAlternatePrecedence(interplayStats, pivotAppearances);
supportForChainPrecedence =
computeSupportForChainPrecedence(interplayStats, pivotAppearances);
supportForCoExistence =
computeSupportForCoExistence(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
supportForSuccession =
computeSupportForSuccession(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
supportForAlternateSuccession =
computeSupportForAlternateSuccession(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
supportForChainSuccession =
computeSupportForChainSuccession(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
supportForNotCoExistence =
computeSupportForNotCoExistence(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
supportForNotSuccession =
computeSupportForNotSuccession(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
supportForNotChainSuccession =
computeSupportForNotChainSuccession(interplayStats, reversedInterplayStats, pivotAppearances + searchedAppearances);
// Register (or retrieve) each constraint in the bag and record its metrics.
// Mind the argument order for precedence templates: searchedTask is the base.
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new RespondedExistence(pivotTask, searchedTask), supportForRespondedExistence, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new Response(pivotTask, searchedTask), supportForResponse, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new AlternateResponse(pivotTask, searchedTask), supportForAlternateResponse, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new ChainResponse(pivotTask, searchedTask), supportForChainResponse, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new Precedence(searchedTask, pivotTask), supportForPrecedence, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new AlternatePrecedence(searchedTask, pivotTask), supportForAlternatePrecedence, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new ChainPrecedence(searchedTask, pivotTask), supportForChainPrecedence, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new CoExistence(pivotTask, searchedTask), supportForCoExistence, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new Succession(pivotTask, searchedTask), supportForSuccession, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new AlternateSuccession(pivotTask, searchedTask), supportForAlternateSuccession, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new ChainSuccession(pivotTask, searchedTask), supportForChainSuccession, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new NotCoExistence(pivotTask, searchedTask), supportForNotCoExistence, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new NotSuccession(pivotTask, searchedTask), supportForNotSuccession, pivotParticipationFraction, searchedParticipationFraction));
nuRelaCons.add(this.updateConstraint(constraintsBag, pivotTask, new NotChainSuccession(pivotTask, searchedTask), supportForNotChainSuccession, pivotParticipationFraction, searchedParticipationFraction));
// precedo.setConstraintWhichThisIsBasedUpon(responExi);
// altPrecedo.setConstraintWhichThisIsBasedUpon(precedo);
// chainPrecedo.setConstraintWhichThisIsBasedUpon(altPrecedo);
// respo.setConstraintWhichThisIsBasedUpon(responExi);
// altRespo.setConstraintWhichThisIsBasedUpon(respo);
// chainRespo.setConstraintWhichThisIsBasedUpon(altRespo);
//
// successio.setConstraintWhichThisIsBasedUpon(coExi);
// altSuccessio.setConstraintWhichThisIsBasedUpon(successio);
// chainSuccessio.setConstraintWhichThisIsBasedUpon(altSuccessio);
//
// notSuccessio.setConstraintWhichThisIsBasedUpon(notChainSuccessio);
// notCoExi.setConstraintWhichThisIsBasedUpon(notSuccessio);
//
// notCoExi.setOpposedTo(coExi);
// notSuccessio.setOpposedTo(successio);
// notChainSuccessio.setOpposedTo(chainSuccessio);
}
// Optionally refine with distance statistics, then count those above thresholds.
Iterator<Constraint> constraintsIterator = nuRelaCons.iterator();
RelationConstraint currentConstraint = null;
while (constraintsIterator.hasNext()) {
currentConstraint = (RelationConstraint) constraintsIterator.next();
if (this.isForeseeingDistances()) {
// Backward-implication constraints are refined from the searched task's
// stats towards the pivot; all others the other way round.
if (currentConstraint.getImplicationVerse() == ConstraintImplicationVerse.BACKWARD)
refineByComputingDistances(currentConstraint, searchedLocalStats, pivotTask);
else
refineByComputingDistances(currentConstraint, pivotLocalStats, searchedTask);
}
if (hasValuesAboveThresholds(currentConstraint)) this.computedConstraintsAboveThresholds++;
}
relaCons.addAll(nuRelaCons);
}
return relaCons;
}
/**
 * Retrieves (or registers) the given constraint in the bag, records its
 * support, marks it as evaluated on the log, and refines its relevance metrics
 * (confidence and interest factor).
 *
 * @return the bagged constraint instance, updated
 */
protected Constraint updateConstraint(ConstraintsBag constraintsBag,
TaskChar indexingParam, Constraint searchedCon,
double support, double pivotParticipationFraction, double searchedParticipationFraction) {
    Constraint bagged = constraintsBag.getOrAdd(indexingParam, searchedCon);
    bagged.setEvaluatedOnLog(true);
    bagged.setSupport(support);
    refineByComputingRelevanceMetrics(bagged, pivotParticipationFraction, searchedParticipationFraction);
    return bagged;
}
/** Support of Not-Chain-Succession: the complement of Chain-Succession's support. */
private double computeSupportForNotChainSuccession(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    double chainSuccessionSupport =
            this.computeSupportForChainSuccession(interplayStats, reversedInterplayStats, sumOfAppearances);
    return Constraint.complementSupport(chainSuccessionSupport);
}
/** Support of Not-Succession: the complement of Succession's support. */
private double computeSupportForNotSuccession(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    double successionSupport =
            this.computeSupportForSuccession(interplayStats, reversedInterplayStats, sumOfAppearances);
    return Constraint.complementSupport(successionSupport);
}
/** Support of Not-Co-Existence: the complement of Co-Existence's support. */
private double computeSupportForNotCoExistence(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    double coExistenceSupport =
            this.computeSupportForCoExistence(interplayStats, reversedInterplayStats, sumOfAppearances);
    return Constraint.complementSupport(coExistenceSupport);
}
/**
 * Support of Chain-Succession: the counts recorded at distance +1 (onwards)
 * and at distance -1 in the reversed interplay, over the summed occurrences
 * of both tasks. Yields 0 when either adjacency count is absent.
 */
private double computeSupportForChainSuccession(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    Integer onwardAdjacencies = interplayStats.distances.get(1);
    Integer backwardAdjacencies = reversedInterplayStats.distances.get(-1);
    if (onwardAdjacencies == null || backwardAdjacencies == null) {
        return 0;
    }
    return (onwardAdjacencies.doubleValue() + backwardAdjacencies.doubleValue()) / sumOfAppearances;
}
/**
 * Support of Alternate-Succession, as the complement of the violation ratio:
 * intervening/never-appearing counts in both directions over the summed
 * occurrences of both tasks.
 */
private double computeSupportForAlternateSuccession(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    double violations = interplayStats.betweenOnwards;
    violations += interplayStats.howManyTimesItNeverAppearedOnwards();
    violations += reversedInterplayStats.betweenBackwards;
    violations += reversedInterplayStats.howManyTimesItNeverAppearedBackwards();
    return Constraint.complementSupport(violations / sumOfAppearances);
}
/**
 * Support of Succession, as the complement of the violation ratio: the
 * never-appeared counts (onwards and, in the reversed interplay, backwards)
 * over the summed occurrences of both tasks.
 */
private double computeSupportForSuccession(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    double violations = interplayStats.howManyTimesItNeverAppearedOnwards();
    violations += reversedInterplayStats.howManyTimesItNeverAppearedBackwards();
    return Constraint.complementSupport(violations / sumOfAppearances);
}
/**
 * Support of Co-Existence, as the complement of the violation ratio: the
 * never-appeared-at-all counts in both directions over the summed occurrences
 * of both tasks.
 */
private double computeSupportForCoExistence(
StatsCell interplayStats, StatsCell reversedInterplayStats, long sumOfAppearances) {
    double violations = interplayStats.howManyTimesItNeverAppearedAtAll();
    violations += reversedInterplayStats.howManyTimesItNeverAppearedAtAll();
    return Constraint.complementSupport(violations / sumOfAppearances);
}
/**
 * Support of Chain-Precedence: the count recorded at distance -1 over the
 * pivot's occurrences; 0 when no such count exists.
 */
private double computeSupportForChainPrecedence(
StatsCell interplayStats, long appearances) {
    Integer backwardAdjacencies = interplayStats.distances.get(-1);
    return (backwardAdjacencies == null) ? 0 : backwardAdjacencies.doubleValue() / appearances;
}
/**
 * Support of Alternate-Precedence, as the complement of the violation ratio:
 * intervening-backwards plus never-appeared-backwards counts over the pivot's
 * occurrences.
 */
private double computeSupportForAlternatePrecedence(
StatsCell interplayStats, long appearances) {
    double violations = interplayStats.betweenBackwards;
    violations += interplayStats.howManyTimesItNeverAppearedBackwards();
    return Constraint.complementSupport(violations / appearances);
}
/**
 * Support of Precedence, as the complement of the violation ratio: the
 * never-appeared-backwards count over the pivot's occurrences.
 */
private double computeSupportForPrecedence(
StatsCell interplayStats, long appearances) {
    double violations = interplayStats.howManyTimesItNeverAppearedBackwards();
    return Constraint.complementSupport(violations / appearances);
}
/**
 * Support of Chain-Response: the count recorded at distance +1 over the
 * pivot's occurrences; 0 when no such count exists.
 */
private double computeSupportForChainResponse(
StatsCell interplayStats, long appearances) {
    Integer onwardAdjacencies = interplayStats.distances.get(1);
    return (onwardAdjacencies == null) ? 0 : onwardAdjacencies.doubleValue() / appearances;
}
/**
 * Support of Alternate-Response, as the complement of the violation ratio:
 * intervening-onwards plus never-appeared-onwards counts over the pivot's
 * occurrences.
 */
private double computeSupportForAlternateResponse(
StatsCell interplayStats, long appearances) {
    double violations = interplayStats.betweenOnwards;
    violations += interplayStats.howManyTimesItNeverAppearedOnwards();
    return Constraint.complementSupport(violations / appearances);
}
/**
 * Support of Response, as the complement of the violation ratio: the
 * never-appeared-onwards count over the pivot's occurrences.
 */
private double computeSupportForResponse(
StatsCell interplayStats, long appearances) {
    double violations = interplayStats.howManyTimesItNeverAppearedOnwards();
    return Constraint.complementSupport(violations / appearances);
}
/**
 * Support of Responded-Existence, as the complement of the violation ratio:
 * the never-appeared-at-all count over the pivot's occurrences.
 */
private double computeSupportForRespondedExistence(
StatsCell interplayStats, long appearances) {
    double violations = interplayStats.howManyTimesItNeverAppearedAtAll();
    return Constraint.complementSupport(violations / appearances);
}
/**
 * No refinement is performed by this miner.
 * NOTE(review): this returns {@code null} rather than the input set; callers
 * must not dereference the result — confirm whether returning the input (or an
 * empty set) was intended.
 */
@Override
protected Set<Constraint> refineRelationConstraints(Set<Constraint> setOfConstraints) {
return null;
}
/**
 * Sets the constraint's confidence to its support scaled by a participation
 * fraction: the lower of the two fractions for coupling/negative constraints,
 * the searched task's fraction for backward-implication constraints, and the
 * pivot's fraction otherwise.
 *
 * @return the same constraint, with confidence updated
 */
public static RelationConstraint refineByComputingConfidenceLevel(RelationConstraint relCon, double pivotParticipationFraction, double searchedParticipationFraction) {
    RelationConstraintSubFamily subFamily = relCon.getSubFamily();
    double scalingFraction;
    if (subFamily == RelationConstraintSubFamily.COUPLING || subFamily == RelationConstraintSubFamily.NEGATIVE) {
        scalingFraction =
                (pivotParticipationFraction < searchedParticipationFraction
                        ? pivotParticipationFraction
                        : searchedParticipationFraction);
    } else if (relCon.getImplicationVerse() == ConstraintImplicationVerse.BACKWARD) {
        scalingFraction = searchedParticipationFraction;
    } else {
        scalingFraction = pivotParticipationFraction;
    }
    relCon.setConfidence(relCon.getSupport() * scalingFraction);
    return relCon;
}
/**
 * Refines a relation constraint's relevance metrics: first its confidence
 * (via {@link #refineByComputingConfidenceLevel}), then its interest factor.
 * Non-negative constraints (plus Not-Chain-Succession and Not-Succession) use
 * support x both participation fractions; the remaining negative constraints
 * weigh the larger fraction by the complement of the smaller one.
 *
 * @param con a {@code RelationConstraint} (cast internally)
 * @return the refined constraint
 */
public static RelationConstraint refineByComputingRelevanceMetrics(Constraint con, double pivotParticipationFraction, double searchedParticipationFraction) {
    RelationConstraint relCon =
            refineByComputingConfidenceLevel(
                    (RelationConstraint) con, pivotParticipationFraction, searchedParticipationFraction);
    boolean productRule =
            relCon.getSubFamily() != RelationConstraintSubFamily.NEGATIVE
            || relCon instanceof NotChainSuccession
            || relCon instanceof NotSuccession;
    double interestFactor;
    if (productRule) {
        interestFactor =
                relCon.getSupport()
                * pivotParticipationFraction
                * searchedParticipationFraction;
    } else {
        interestFactor =
                relCon.getSupport()
                * ( pivotParticipationFraction > searchedParticipationFraction
                        ? pivotParticipationFraction * (1.0 - searchedParticipationFraction)
                        : searchedParticipationFraction * (1.0 - pivotParticipationFraction)
                );
    }
    relCon.setInterestFactor(interestFactor);
    return relCon;
}
/**
 * Enriches a responded-existence (or derived) constraint with the mean and a
 * confidence-interval margin of the event distances recorded between the
 * implying task and the implied one. Distances are taken from the implying
 * task's interplay cell, restricted to the direction dictated by the
 * constraint's implication verse (sentinel "never" bounds excluded).
 * Constraints that are not {@code RespondedExistence} instances are returned
 * untouched.
 *
 * NOTE(review): the BOTH case has no {@code break} and falls through into
 * {@code default} (harmless today, fragile if a case is added). Also,
 * {@code cumulativeProbability(0.05)} evaluates the t-CDF at 0.05 — for a
 * confidence-interval t-factor one would expect
 * {@code inverseCumulativeProbability(...)}; confirm the intended statistic.
 */
private static RelationConstraint refineByComputingDistances(
RelationConstraint relCon,
LocalStatsWrapper implyingLocalStats, TaskChar implied) {
if (relCon instanceof RespondedExistence) {
RespondedExistence resEx = (RespondedExistence)relCon;
SummaryStatistics distancesSumStats = new SummaryStatistics();
NavigableMap<Integer, Integer> distancesMap = implyingLocalStats.interplayStatsTable.get(implied).distances;
// FIXME Watch out, this is by chance so: one thing is saying that the implication verse is from the second parameter towards the first, another thing is to state that the temporal constraint is exerted on the occurrence onwards or backwards
switch (resEx.getImplicationVerse()) {
case FORWARD:
// Positive distances only, excluding the NEVER_ONWARDS sentinel.
distancesMap = distancesMap.tailMap(0, false).headMap(StatsCell.NEVER_ONWARDS, false);
for (Integer distance : distancesMap.keySet()) {
if (distance != StatsCell.NEVER_EVER) {
// Add the distance once per time it was observed.
for (int i = 0; i < distancesMap.get(distance); i++) {
distancesSumStats.addValue(distance);
}
}
}
break;
case BACKWARD:
// Negative distances only, excluding the NEVER_BACKWARDS sentinel.
distancesMap = distancesMap.tailMap(StatsCell.NEVER_BACKWARDS, false).headMap(0, false);
for (Integer distance : distancesMap.keySet()) {
if (distance != StatsCell.NEVER_EVER) {
for (int i = 0; i < distancesMap.get(distance); i++) {
distancesSumStats.addValue(distance);
}
}
}
break;
case BOTH:
// Both directions, excluding both sentinels (note: zero is included here).
distancesMap = distancesMap.tailMap(StatsCell.NEVER_BACKWARDS, false).headMap(StatsCell.NEVER_ONWARDS, false);
for (Integer distance : distancesMap.keySet()) {
if (distance != StatsCell.NEVER_EVER) {
for (int i = 0; i < distancesMap.get(distance); i++) {
distancesSumStats.addValue(distance);
}
}
}
default:
break;
}
// At least two samples are needed for a standard deviation to be meaningful.
if (distancesSumStats.getN() > 1) {
resEx.expectedDistance = distancesSumStats.getMean();
double tFactor = new TDistribution(distancesSumStats.getN()-1).cumulativeProbability(0.05);
resEx.confidenceIntervalMargin = tFactor * distancesSumStats.getStandardDeviation();
}
}
return relCon;
}
/** @return whether this miner also estimates expected distances between correlated events. */
public boolean isForeseeingDistances() {
    return this.foreseeingDistances;
}
} | 23,782 | 53.175399 | 339 | java |
Janus | Janus-master/src/minerful/miner/RelationConstraintsMiner.java | package minerful.miner;
import java.util.Set;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.ConstraintsBag;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
public abstract class RelationConstraintsMiner extends AbstractConstraintsMiner {
/**
 * Builds a relation-constraints miner over the given knowledge base, alphabet
 * archive, and set of tasks to query for.
 */
public RelationConstraintsMiner(GlobalStatsTable globalStats, TaskCharArchive taskCharArchive, Set<TaskChar> tasksToQueryFor) {
super(globalStats, taskCharArchive, tasksToQueryFor);
}
/**
 * Counts the discoverable relation constraints: one per template for every
 * ordered pair of distinct tasks in the alphabet.
 */
@Override
public long howManyPossibleConstraints() {
    int alphabetSize = taskCharArchive.size();
    return MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_RELATION_CONSTRAINT_TEMPLATES * alphabetSize * (alphabetSize - 1);
}
/**
 * Hook for subclasses to refine an already discovered set of relation
 * constraints.
 */
protected abstract Set<? extends Constraint> refineRelationConstraints(
Set<Constraint> setOfConstraints);
/**
 * Discovers the relation constraints pivoted on the given task, registering
 * them in the provided bag.
 */
protected abstract Set<? extends Constraint> discoverRelationConstraints(TaskChar taskChUnderAnalysis,
ConstraintsBag constraintsBag);
/**
 * Computes the fraction of traces in which the task occurs at least once:
 * the complement of (traces with zero repetitions / testbed size).
 * NOTE(review): the {@code base} parameter is unused here; kept for
 * signature compatibility with callers.
 */
protected double computeParticipationFraction(TaskChar base, LocalStatsWrapper localStats,
long testbedSize) {
    long tracesWithoutTask = 0L;
    if (localStats.repetitions.containsKey(0)) {
        tracesWithoutTask = localStats.repetitions.get(0);
    }
    double absenceRatio = (double) tracesWithoutTask / (double) testbedSize;
    return Constraint.complementSupport(absenceRatio);
}
} | 1,563 | 37.146341 | 138 | java |
Janus | Janus-master/src/minerful/miner/core/MinerFulKBCore.java | package minerful.miner.core;
import java.util.concurrent.Callable;
import minerful.concept.TaskCharArchive;
import minerful.logparser.LogParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.OccurrencesStatsBuilder;
import org.apache.log4j.LogMF;
import org.apache.log4j.Logger;
import org.apache.log4j.Priority;
import org.apache.log4j.PropertyConfigurator;
public class MinerFulKBCore implements Callable<GlobalStatsTable> {
public static final String KNOWLEDGE_BASE_BUILDING_CODE = "'M-KB'";
protected static Logger logger;
protected LogParser logParser;
protected MinerFulCmdParameters minerFulParams;
protected TaskCharArchive taskCharArchive;
public final int jboNum;
// Instance initialiser: lazily creates the shared class logger the first time
// a MinerFulKBCore is instantiated.
{
if (logger == null) {
logger = Logger.getLogger(MinerFulKBCore.class.getCanonicalName());
}
}
/**
 * Builds a knowledge-base construction core.
 *
 * @param coreNum job number, used in the reported statistics
 * @param logParser source of the traces to analyse
 * @param minerFulParams mining parameters (e.g. the branching limit)
 * @param taskCharArchive alphabet of tasks under analysis
 */
public MinerFulKBCore(int coreNum, LogParser logParser,
MinerFulCmdParameters minerFulParams,
TaskCharArchive taskCharArchive) {
this.jboNum = coreNum;
this.logParser = logParser;
this.minerFulParams = minerFulParams;
this.taskCharArchive = taskCharArchive;
}
/** Replaces the log parser this core reads events from. */
public void setLogParser(LogParser newLogParser) {
    this.logParser = newLogParser;
}
/**
 * Builds the occurrences/distances knowledge base from the current log,
 * logging timing statistics along the way.
 *
 * @return the populated global statistics table
 */
public GlobalStatsTable discover() {
    logger.info("\nComputing occurrences/distances table...");
    // A null limit tells the stats builder that branching is unrestricted.
    Integer branchingLimit =
            minerFulParams.branchingLimit.equals(MinerFulCmdParameters.MINIMUM_BRANCHING_LIMIT)
            ? null
            : minerFulParams.branchingLimit;
    long startMillis = System.currentTimeMillis();
    OccurrencesStatsBuilder statsBuilder =
            // new OccurrencesStatsBuilder(alphabet, TaskCharEncoderDecoder.CONTEMPORANEITY_CHARACTER_DELIMITER, branchingLimit);
            new OccurrencesStatsBuilder(taskCharArchive, branchingLimit);
    GlobalStatsTable statsTable = statsBuilder.checkThisOut(logParser);
    logger.info("Done!");
    long occuTabTime = System.currentTimeMillis() - startMillis;
    this.printComputationStats(occuTabTime);
    // LogMF defers message construction, so this costs nothing when tracing is off.
    LogMF.trace(logger, "\nStats:\n{0}", statsTable);
    return statsTable;
}
/**
 * Logs a two-line CSV summary of the KB-construction run: one line of field
 * labels (the "legend") and one line of the corresponding values, separated
 * by semicolons. The repeated append/legend/separator boilerplate of the
 * original implementation is factored into {@link #appendCsvField}; the
 * emitted text is unchanged.
 *
 * @param occuTabTime time (in milliseconds) spent computing the statistics table
 */
public void printComputationStats(long occuTabTime) {
    long totalChrs = logParser.numberOfEvents();
    int minChrs = logParser.minimumTraceLength(),
        maxChrs = logParser.maximumTraceLength();
    Double avgChrsPerString = 1.0 * totalChrs / logParser.length();
    StringBuilder
            csvSummaryBuffer = new StringBuilder(),
            csvSummaryLegendBuffer = new StringBuilder(),
            csvSummaryComprehensiveBuffer = new StringBuilder();
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, MinerFulKBCore.KNOWLEDGE_BASE_BUILDING_CODE, "'Operation code for KB construction'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, jboNum, "'Job number'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, logParser.length(), "'Number of traces'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, minChrs, "'Min events per trace'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, maxChrs, "'Max events per trace'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, avgChrsPerString, "'Avg events per trace'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, totalChrs, "'Events read'", true);
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, taskCharArchive.size(), "'Alphabet size'", true);
    // Last field carries no trailing separator.
    appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, occuTabTime, "'Statistics computation time'", false);
    csvSummaryComprehensiveBuffer.append("\n\nTimings' summary: \n");
    csvSummaryComprehensiveBuffer.append(csvSummaryLegendBuffer.toString());
    csvSummaryComprehensiveBuffer.append("\n");
    csvSummaryComprehensiveBuffer.append(csvSummaryBuffer.toString());
    logger.info(csvSummaryComprehensiveBuffer.toString());
}

/**
 * Appends one value/label pair to the parallel CSV buffers, optionally
 * followed by the {@code ";"} separator on both.
 */
private static void appendCsvField(StringBuilder values, StringBuilder legend, Object value, String label, boolean withSeparator) {
    values.append(value);
    legend.append(label);
    if (withSeparator) {
        values.append(";");
        legend.append(";");
    }
}
/** {@inheritDoc} Delegates to {@link #discover()} so the core can run in an executor. */
@Override
public GlobalStatsTable call() throws Exception {
    return this.discover();
}
} | 5,435 | 38.107914 | 148 | java |
Janus | Janus-master/src/minerful/miner/core/MinerFulPruningCore.java | package minerful.miner.core;
import java.util.Collection;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.constraint.ConstraintsBag;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.postprocessing.pruning.ConflictAndRedundancyResolver;
import minerful.postprocessing.pruning.SubsumptionHierarchyMarkingPolicy;
import minerful.postprocessing.pruning.ThresholdsMarker;
import minerful.postprocessing.pruning.SubsumptionHierarchyMarker;
import org.apache.log4j.Logger;
public class MinerFulPruningCore {
protected static Logger logger;
protected ProcessModel processModel;
protected Collection<TaskChar> tasksToQueryFor;
protected PostProcessingCmdParameters postProcParams;
protected SubsumptionHierarchyMarker subMarker;
protected ThresholdsMarker threshMarker;
protected ProcessModel fixpointModel;
// Instance initialiser: lazily creates the shared class logger the first time
// a MinerFulPruningCore is instantiated.
{
    if (logger == null) {
        // Fixed: the logger was registered under MinerFulQueryingCore's canonical
        // name (a copy-paste slip), so pruning messages were attributed to the
        // wrong logging category.
        logger = Logger.getLogger(MinerFulPruningCore.class.getCanonicalName());
    }
}
/**
 * Builds a pruning core operating over the model's whole process alphabet.
 */
public MinerFulPruningCore(ProcessModel processModel,
PostProcessingCmdParameters postProcParams) {
this(processModel,
processModel.getProcessAlphabet(),
postProcParams);
}
/**
 * Builds a pruning core that marks constraints relative to a fixpoint model:
 * both the subsumption and the thresholds markers are created over the model's
 * bag paired with the fixpoint model's bag.
 * NOTE(review): unlike the collection-based constructor, no marking policy is
 * set on the subsumption marker here — confirm its default policy is intended.
 */
public MinerFulPruningCore(ProcessModel processModel,
PostProcessingCmdParameters postProcParams,
ProcessModel fixpointModel) {
this.processModel = processModel;
this.tasksToQueryFor = processModel.getProcessAlphabet();
this.postProcParams = postProcParams;
this.subMarker = new SubsumptionHierarchyMarker(processModel.bag, fixpointModel.bag);
this.threshMarker = new ThresholdsMarker(processModel.bag, fixpointModel.bag);
this.fixpointModel = fixpointModel;
}
public MinerFulPruningCore(ProcessModel processModel,
Collection<TaskChar> tasksToQueryFor,
PostProcessingCmdParameters postProcParams) {
this.processModel = processModel;
this.tasksToQueryFor = tasksToQueryFor;
this.postProcParams = postProcParams;
this.subMarker = new SubsumptionHierarchyMarker(processModel.bag);
// FIXME Make it parametric
this.subMarker.setPolicy(SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY);
this.threshMarker = new ThresholdsMarker(processModel.bag);
}
public ConstraintsBag massageConstraints() {
logger.info("Post-processing the discovered model...");
if (this.postProcParams.postProcessingAnalysisType.isPostProcessingRequested()) {
this.markConstraintsBelowThresholds();
if (this.postProcParams.postProcessingAnalysisType.isHierarchySubsumptionResolutionRequested()) {
this.markRedundancyBySubsumptionHierarchy();
}
if (this.postProcParams.postProcessingAnalysisType.isRedundancyResolutionRequested()) {
this.detectConflictsOrRedundancies();
}
}
if (this.postProcParams.cropRedundantAndInconsistentConstraints) {
this.processModel.bag.removeMarkedConstraints();
}
return this.processModel.bag;
}
private ConstraintsBag markConstraintsBelowThresholds() {
logger.info("Pruning constraints below thresholds...");
long beforeThresholdsPruning = System.currentTimeMillis();
this.processModel.bag = this.threshMarker.markConstraintsBelowThresholds(
this.postProcParams.supportThreshold,
this.postProcParams.confidenceThreshold,
this.postProcParams.interestFactorThreshold);
long afterThresholdsPruning = System.currentTimeMillis();
this.threshMarker.printComputationStats(beforeThresholdsPruning, afterThresholdsPruning);
if (this.postProcParams.cropRedundantAndInconsistentConstraints) {
this.processModel.bag.removeMarkedConstraints();
}
// Let us try to free memory!
System.gc();
return this.processModel.bag;
}
private ConstraintsBag detectConflictsOrRedundancies() {
long beforeConflictResolution = System.currentTimeMillis();
ConflictAndRedundancyResolver confliReso;
if (fixpointModel == null) {
confliReso = new ConflictAndRedundancyResolver(processModel, postProcParams);
} else {
confliReso = new ConflictAndRedundancyResolver(processModel, postProcParams, fixpointModel);
}
// this.processModel = confliReso.resolveConflictsOrRedundancies();
confliReso.resolveConflictsOrRedundancies();
long afterConflictResolution = System.currentTimeMillis();
confliReso.printComputationStats(beforeConflictResolution, afterConflictResolution);
if (this.postProcParams.cropRedundantAndInconsistentConstraints) {
this.processModel.bag.removeMarkedConstraints();
}
// Let us try to free memory!
System.gc();
return this.processModel.bag;
}
public ConstraintsBag markRedundancyBySubsumptionHierarchy() {
long
beforeSubCheck = 0L,
afterSubCheck = 0L;
// if (!this.postProcParams.cropRedundantAndInconsistentConstraints) {
// this.processModel.resetMarks();
// }
logger.info("Pruning redundancy, on the basis of hierarchy subsumption...");
beforeSubCheck = System.currentTimeMillis();
if (this.subMarker.getFixpointConstraintsBag() != null) {
this.subMarker.markSubsumptionRedundantConstraintsFromSeed(this.tasksToQueryFor);
} else {
this.subMarker.markSubsumptionRedundantConstraints(this.tasksToQueryFor);
}
afterSubCheck = System.currentTimeMillis();
this.subMarker.printComputationStats(beforeSubCheck, afterSubCheck);
if (this.postProcParams.cropRedundantAndInconsistentConstraints) {
this.processModel.bag.removeMarkedConstraints();
}
// Let us try to free memory!
System.gc();
return this.processModel.bag;
}
public ProcessModel getProcessModel() {
return this.processModel;
}
} | 6,322 | 37.321212 | 109 | java |
Janus | Janus-master/src/minerful/miner/core/MinerFulQueryingCore.java | package minerful.miner.core;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.ConstraintsBag;
import minerful.logparser.LogParser;
import minerful.miner.ConstraintsMiner;
import minerful.miner.ProbabilisticExistenceConstraintsMiner;
import minerful.miner.ProbabilisticRelationBranchedConstraintsMiner;
import minerful.miner.ProbabilisticRelationConstraintsMiner;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.miner.stats.GlobalStatsTable;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.log4j.Logger;
public class MinerFulQueryingCore implements Callable<ConstraintsBag> {
public static final String KB_QUERYING_CODE = "'M-Q'";
protected static Logger logger;
protected LogParser logParser;
protected MinerFulCmdParameters minerFulParams;
protected PostProcessingCmdParameters postPrarams;
protected TaskCharArchive taskCharArchive;
protected GlobalStatsTable statsTable;
private Set<TaskChar> tasksToQueryFor;
protected ConstraintsBag bag;
public final int jobNum;
{
if (logger == null) {
logger = Logger.getLogger(MinerFulQueryingCore.class.getCanonicalName());
}
}
public MinerFulQueryingCore(int coreNum, LogParser logParser,
MinerFulCmdParameters minerFulParams, PostProcessingCmdParameters postPrarams,
TaskCharArchive taskCharArchive,
GlobalStatsTable globalStatsTable) {
this(coreNum,logParser,minerFulParams,postPrarams,taskCharArchive,globalStatsTable,null,null);
}
public MinerFulQueryingCore(int coreNum,
LogParser logParser,
MinerFulCmdParameters minerFulParams, PostProcessingCmdParameters postPrarams,
TaskCharArchive taskCharArchive,
GlobalStatsTable globalStatsTable, Set<TaskChar> tasksToQueryFor) {
this(coreNum,logParser,minerFulParams,postPrarams,taskCharArchive,globalStatsTable,tasksToQueryFor,null);
}
public MinerFulQueryingCore(int coreNum,
LogParser logParser,
MinerFulCmdParameters minerFulParams, PostProcessingCmdParameters postPrarams,
TaskCharArchive taskCharArchive,
GlobalStatsTable globalStatsTable,
ConstraintsBag bag) {
this(coreNum,logParser,minerFulParams,postPrarams,taskCharArchive,globalStatsTable,null,bag);
}
public MinerFulQueryingCore(int coreNum,
LogParser logParser,
MinerFulCmdParameters minerFulParams, PostProcessingCmdParameters postPrarams,
TaskCharArchive taskCharArchive,
GlobalStatsTable globalStatsTable, Set<TaskChar> tasksToQueryFor,
ConstraintsBag bag) {
this.jobNum = coreNum;
this.logParser = logParser;
this.minerFulParams = minerFulParams;
this.postPrarams = postPrarams;
this.taskCharArchive = taskCharArchive;
this.statsTable = globalStatsTable;
if (tasksToQueryFor == null) {
this.tasksToQueryFor = taskCharArchive.getTaskChars();
} else {
this.tasksToQueryFor = tasksToQueryFor;
}
this.bag = (bag == null ? new ConstraintsBag(this.tasksToQueryFor) : bag);
}
public void setStatsTable(GlobalStatsTable statsTable) {
this.statsTable = statsTable;
}
public ConstraintsBag discover() {
long
possibleNumberOfConstraints = 0L,
possibleNumberOfExistenceConstraints = 0L,
possibleNumberOfRelationConstraints = 0L,
numOfConstraintsAboveThresholds = 0L,
numOfExistenceConstraintsAboveThresholds = 0L,
numOfRelationConstraintsAboveThresholds = 0L,
before = 0L,
after = 0L,
exiConTime = 0L,
relaConTime = 0L;
if (minerFulParams.statsOutputFile != null) {
try {
this.marshalStats(statsTable, minerFulParams.statsOutputFile, taskCharArchive);
} catch (JAXBException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
logger.info("Discovering existence constraints...");
before = System.currentTimeMillis();
// search for existence constraints
ConstraintsMiner exiConMiner = new ProbabilisticExistenceConstraintsMiner(statsTable, taskCharArchive, tasksToQueryFor);
exiConMiner.setSupportThreshold(postPrarams.supportThreshold);
exiConMiner.setConfidenceThreshold(postPrarams.confidenceThreshold);
exiConMiner.setInterestFactorThreshold(postPrarams.interestFactorThreshold);
// ConstraintsBag updatedBag =
exiConMiner.discoverConstraints(this.bag);
after = System.currentTimeMillis();
exiConTime = after - before;
logger.debug("Existence constraints, computed in: " + exiConTime + " msec");
possibleNumberOfExistenceConstraints = exiConMiner.howManyPossibleConstraints();
possibleNumberOfConstraints += possibleNumberOfExistenceConstraints;
numOfExistenceConstraintsAboveThresholds = exiConMiner.getComputedConstraintsAboveTresholds();
numOfConstraintsAboveThresholds += numOfExistenceConstraintsAboveThresholds;
logger.info("Discovering relation constraints...");
before = System.currentTimeMillis();
// search for relation constraints
relaConTime = 0;
ConstraintsMiner relaConMiner = null;
if (!minerFulParams.isBranchingRequired()) {
relaConMiner = new ProbabilisticRelationConstraintsMiner(statsTable, taskCharArchive, tasksToQueryFor, minerFulParams.foreseeDistances);
} else {
relaConMiner = new ProbabilisticRelationBranchedConstraintsMiner(statsTable, taskCharArchive, tasksToQueryFor, minerFulParams.branchingLimit);
}
relaConMiner.setSupportThreshold(postPrarams.supportThreshold);
relaConMiner.setConfidenceThreshold(postPrarams.confidenceThreshold);
relaConMiner.setInterestFactorThreshold(postPrarams.interestFactorThreshold);
// updatedBag = relaConMiner.discoverConstraints(updatedBag);
relaConMiner.discoverConstraints(this.bag);
after = System.currentTimeMillis();
relaConTime = after - before;
/*
// Calculate how much was the space for data structures
if (minerFulParams.memSpaceShowingRequested) {
maxMemUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed();
}
*/
// Let us try to free memory from the unused statsTable!
// System.gc();
logger.info("Done!");
logger.debug("Relation constraints, computed in: " + relaConTime + " msec");
possibleNumberOfRelationConstraints = relaConMiner.howManyPossibleConstraints();
possibleNumberOfConstraints += possibleNumberOfRelationConstraints;
numOfRelationConstraintsAboveThresholds = relaConMiner.getComputedConstraintsAboveTresholds();
numOfConstraintsAboveThresholds += numOfRelationConstraintsAboveThresholds;
printComputationStats(// occuTabTime,
exiConTime, relaConTime, //maxMemUsage,
0,
possibleNumberOfConstraints,
possibleNumberOfExistenceConstraints,
possibleNumberOfRelationConstraints,
numOfConstraintsAboveThresholds,
numOfExistenceConstraintsAboveThresholds,
numOfRelationConstraintsAboveThresholds);
return this.bag;
}
public void printComputationStats(
//long occuTabTime,
long exiConTime,
long relaConTime, long maxMemUsage,
long possibleNumberOfConstraints,
long possibleNumberOfExistenceConstraints,
long possibleNumberOfRelationConstraints,
long numOfConstraintsAboveThresholds,
long numOfExistenceConstraintsAboveThresholds,
long numOfRelationConstraintsAboveThresholds) {
StringBuffer
csvSummaryBuffer = new StringBuffer(),
csvSummaryLegendBuffer = new StringBuffer(),
csvSummaryComprehensiveBuffer = new StringBuffer();
csvSummaryBuffer.append(MinerFulQueryingCore.KB_QUERYING_CODE);
csvSummaryLegendBuffer.append("'Operation code for KB querying'");
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(jobNum);
csvSummaryLegendBuffer.append("'Job number'");
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(tasksToQueryFor.size());
csvSummaryLegendBuffer.append("'Number of inspected activities'");
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append(";");
// csvSummaryLegendBuffer.append("'Total time'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(occuTabTime + exiConTime + relaConTime + pruniTime);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Statistics computation time'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(occuTabTime);
// csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total querying time'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(exiConTime + relaConTime);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Constraints discovery time'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(exiConTime + relaConTime);
csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Subsumption hierarchy pruning time'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(pruniTime);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Relation constraints discovery time'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(relaConTime);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Existence constraints discovery time'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(exiConTime);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Maximum memory usage'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(maxMemUsage);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total number of discoverable constraints'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(possibleNumberOfConstraints);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total number of discoverable existence constraints'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(possibleNumberOfExistenceConstraints);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total number of discoverable relation constraints'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(possibleNumberOfRelationConstraints);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total number of discovered constraints above thresholds'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(numOfConstraintsAboveThresholds);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total number of discovered existence constraints above thresholds'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(numOfExistenceConstraintsAboveThresholds);
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append("'Total number of discovered relation constraints above thresholds'");
csvSummaryLegendBuffer.append(";");
csvSummaryBuffer.append(numOfRelationConstraintsAboveThresholds);
csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Constraints before hierarchy-based pruning'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(numOfConstraintsBeforePruning);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Existence constraints before hierarchy-based pruning'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(numOfExistenceConstraintsBeforePruning);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Relation constraints before hierarchy-based pruning'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(numOfRelationConstraintsBeforePruning);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Constraints before threshold-based pruning'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(numOfPrunedByHierarchyConstraints);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Existence constraints before threshold-based pruning'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(numOfPrunedByHierarchyExistenceConstraints);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Relation onstraints before threshold-based pruning'");
// csvSummaryLegendBuffer.append(";");
// csvSummaryBuffer.append(numOfPrunedByHierarchyRelationConstraints);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Constraints after pruning';");
// csvSummaryBuffer.append(numOfConstraintsAfterPruningAndThresholding);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Existence constraints after pruning';");
// csvSummaryBuffer.append(numOfExistenceConstraintsAfterPruningAndThresholding);
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append("'Relation constraints after pruning'");
// csvSummaryBuffer.append(numOfRelationConstraintsAfterPruningAndThresholding);
csvSummaryComprehensiveBuffer.append("\n\nTimings' summary: \n");
csvSummaryComprehensiveBuffer.append(csvSummaryLegendBuffer.toString());
csvSummaryComprehensiveBuffer.append("\n");
csvSummaryComprehensiveBuffer.append(csvSummaryBuffer.toString());
logger.info(csvSummaryComprehensiveBuffer.toString());
}
public void marshalStats(GlobalStatsTable statsTable, File outFile, TaskCharArchive taskCharArchive) throws JAXBException, IOException {
String pkgName = statsTable.getClass().getCanonicalName().toString();
pkgName = pkgName.substring(0, pkgName.lastIndexOf('.'));
JAXBContext jaxbCtx = JAXBContext.newInstance(pkgName);
Marshaller marsh = jaxbCtx.createMarshaller();
marsh.setProperty("jaxb.formatted.output", true);
if (taskCharArchive == null) {
OutputStream os = new FileOutputStream(outFile);
marsh.marshal(statsTable, os);
os.flush();
os.close();
} else {
// TODO AWFUL but probably less time-consuming
StringWriter sWri = new StringWriter();
marsh.marshal(statsTable, sWri);
Pattern p = Pattern.compile("task=\"(.)\"");
String rawXml = sWri.toString();
StringBuffer sBuf = new StringBuffer(rawXml.length());
Matcher match = p.matcher(rawXml);
String auxDecodedTask = null;
while (match.find()) {
auxDecodedTask = StringEscapeUtils.escapeXml(taskCharArchive.getTaskChar(match.group(1).charAt(0)).getName());
match.appendReplacement(sBuf, "task=\"" + auxDecodedTask + "\"");
}
match.appendTail(sBuf);
PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(outFile)));
out.print(sBuf);
out.flush();
out.close();
}
}
@Override
public ConstraintsBag call() throws Exception {
return this.discover();
}
} | 16,099 | 43.352617 | 151 | java |
Janus | Janus-master/src/minerful/miner/engine/ProbabilisticRelationInBranchedConstraintsMiningEngine.java | package minerful.miner.engine;
import java.util.SortedSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.MutualRelationConstraint;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
import minerful.miner.stats.LocalStatsWrapperForCharsets;
import minerful.miner.stats.charsets.TasksSetCounter;
/**
 * Mining engine for in-branched ("branched on the activation side") relation
 * constraints. Each {@code discoverBranched*} method computes the support of
 * one Declare constraint template as
 * {@code 1 - negativeOccurrences / totalOccurrences} (or a positive ratio, for
 * the chain templates), reading counters from the {@link GlobalStatsTable} and
 * from the per-task {@link LocalStatsWrapperForCharsets} character-set counters.
 * Methods return {@code null} when the constraint cannot be assessed (e.g. the
 * involved tasks never occur).
 */
public class ProbabilisticRelationInBranchedConstraintsMiningEngine {
	private GlobalStatsTable globalStats;

	public ProbabilisticRelationInBranchedConstraintsMiningEngine(
			GlobalStatsTable globalStats) {
		this.globalStats = globalStats;
	}

	/**
	 * AlternatePrecedence(searched, combo): counts as violations the cases
	 * where no task of the combo appeared before the first occurrence of
	 * {@code searched}, plus the repetitions breaking the alternation.
	 * Returns {@code null} if {@code searched} never occurs.
	 */
	public AlternatePrecedence discoverBranchedAlternatePrecedenceConstraints(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		AlternatePrecedence nuConstraint = null;
		if (searchedAppearances < 1)
			return nuConstraint;

		double support = 0;
		int negativeOccurrences = 0;
		LocalStatsWrapperForCharsets extSearchedLocalStats = (LocalStatsWrapperForCharsets) searchedLocalStats;
		SortedSet<TasksSetCounter> neverMoreBeforeFirstCharSets =
				extSearchedLocalStats.getNeverMoreBeforeFirstOccurrenceCharacterSets()
				.selectCharSetCountersSharedAmong(
						comboToAnalyze.getTaskCharsArray()
				);
		SortedSet<TasksSetCounter> alternationCharSets =
				extSearchedLocalStats.getRepetitionsAfterCharactersAppearingBefore()
				.selectCharSetCountersSharedAmong(
						comboToAnalyze.getTaskCharsArray()
				);
		for (TasksSetCounter neverMoreBeforeFirstCharSet : neverMoreBeforeFirstCharSets) {
			negativeOccurrences += neverMoreBeforeFirstCharSet.getCounter();
		}
		for (TasksSetCounter alternationBeforeCharSet : alternationCharSets) {
			negativeOccurrences += alternationBeforeCharSet.getCounter();
		}
		support = 1.0 - (double)negativeOccurrences / (double)searchedAppearances;
		nuConstraint = new AlternatePrecedence(
				new TaskCharSet(searched),
				comboToAnalyze,
				support);
		return nuConstraint;
	}

	/**
	 * AlternateResponse(combo, searched): violations are pivot occurrences
	 * never followed by {@code searched}, plus pivot repetitions in between.
	 * Returns {@code null} if no pivot of the combo ever occurs.
	 */
	public AlternateResponse discoverBranchedAlternateResponseConstraints(
			TaskChar searched,
			TaskCharSet comboToAnalyze) {
		AlternateResponse nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).howManyTimesItNeverAppearedOnwards();
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).betweenOnwards;
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		if (denominator > 0) { // in case no pivot ever appeared, do not add this constraint!
			support = 1.0 - (double) negativeOccurrences / (double) denominator;
			nuConstraint = new AlternateResponse(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}

	/**
	 * AlternateSuccession(combo, searched): combines the alternate-response
	 * violations of the pivots with the never-appeared-before counters of
	 * {@code searched}. Returns {@code null} if neither side ever occurs.
	 */
	public AlternateSuccession discoverBranchedAlternateSuccessionConstraints(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		AlternateSuccession nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		LocalStatsWrapperForCharsets extSearchedLocalStats = (LocalStatsWrapperForCharsets) searchedLocalStats;
		SortedSet<TasksSetCounter>
			neverAppearedBeforeCharSets = null,
			repetitionsBeforeAppearingAfterCharSets = null;

		negativeOccurrences = 0;
		denominator = 0;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).howManyTimesItNeverAppearedOnwards();
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).betweenOnwards;
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		neverAppearedBeforeCharSets =
				extSearchedLocalStats.getNeverMoreAppearedBeforeCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		for (TasksSetCounter neverAppearedCharSet : neverAppearedBeforeCharSets) {
			negativeOccurrences += neverAppearedCharSet.getCounter();
		}
		denominator += searchedAppearances;
		if (denominator > 0) { // in case no pivot nor searched ever appeared, do not add this constraint!
			support = 1.0 - (double) negativeOccurrences / (double) denominator;
			nuConstraint = new AlternateSuccession(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}

	/**
	 * ChainPrecedence(searched, combo): positive cases are occurrences of
	 * {@code searched} immediately preceded (distance -1) by a combo task.
	 * Returns {@code null} if {@code searched} never occurs.
	 */
	public ChainPrecedence discoverBranchedChainPrecedenceConstraints(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		ChainPrecedence nuConstraint = null;
		if (searchedAppearances < 1)
			return nuConstraint;

		int positiveOccurrences = 0;
		double support = 0;
		Integer tmpPositiveOccurrencesAdder = null;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			tmpPositiveOccurrencesAdder = searchedLocalStats.interplayStatsTable.get(pivot).distances.get(-1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
		}
		support = (double) positiveOccurrences / (double) searchedAppearances;
		nuConstraint = new ChainPrecedence(
				new TaskCharSet(searched),
				comboToAnalyze,
				support);
		return nuConstraint;
	}

	/**
	 * ChainResponse(combo, searched): positive cases are pivot occurrences
	 * immediately followed (distance +1) by {@code searched}.
	 * Returns {@code null} if no pivot ever occurs.
	 */
	public ChainResponse discoverBranchedChainResponseConstraints(
			TaskChar searched,
			TaskCharSet comboToAnalyze) {
		ChainResponse nuConstraint = null;
		int positiveOccurrences = 0,
			denominator = 0;
		Integer tmpPositiveOccurrencesAdder = null;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			tmpPositiveOccurrencesAdder = pivotStatsWrapper.interplayStatsTable.get(searched.identifier).distances.get(1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		if (denominator > 0) { // in case no pivot ever appeared, do not add this constraint!
			support = (double) positiveOccurrences / (double) denominator;
			nuConstraint = new ChainResponse(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}

	/**
	 * ChainSuccession(combo, searched): merges chain-response (+1 distances of
	 * pivots) and chain-precedence (-1 distances of searched) positives.
	 * NOTE(review): unlike the sibling methods, there is no
	 * {@code denominator > 0} guard here, so a support of NaN can result when
	 * neither side ever occurs — confirm whether a guard is intended.
	 */
	public ChainSuccession discoverBranchedChainSuccessionConstraint(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		ChainSuccession nuConstraint = null;
		int positiveOccurrences = 0,
			denominator = 0;
		double support = 0;
		Integer tmpPositiveOccurrencesAdder = null;
		LocalStatsWrapper
			pivotLocalStats = null;
		positiveOccurrences = 0;
		denominator = (int) searchedAppearances;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotLocalStats = globalStats.statsTable.get(pivot);
			tmpPositiveOccurrencesAdder = pivotLocalStats.interplayStatsTable.get(searched.identifier).distances.get(1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
			denominator += pivotLocalStats.getTotalAmountOfOccurrences();
			tmpPositiveOccurrencesAdder = searchedLocalStats.interplayStatsTable.get(pivot).distances.get(-1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
		}
		support = (double) positiveOccurrences / (double) denominator;
		nuConstraint = new ChainSuccession(
				comboToAnalyze,
				new TaskCharSet(searched),
				support);
		return nuConstraint;
	}

	/**
	 * CoExistence(combo, searched): violations are occurrences of either side
	 * with the other side absent from the trace.
	 * Returns {@code null} if neither side ever occurs.
	 */
	public MutualRelationConstraint discoverBranchedCoExistenceConstraints(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		MutualRelationConstraint nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		LocalStatsWrapperForCharsets extSearchedLocalStats = (LocalStatsWrapperForCharsets) searchedLocalStats;
		SortedSet<TasksSetCounter> neverAppearedCharSets = null;

		negativeOccurrences = 0;
		denominator = 0;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).howManyTimesItNeverAppearedAtAll();
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		neverAppearedCharSets =
				extSearchedLocalStats.getNeverAppearedCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		for (TasksSetCounter neverAppearedCharSet : neverAppearedCharSets) {
			negativeOccurrences += neverAppearedCharSet.getCounter();
		}
		denominator += searchedAppearances;
		if (denominator > 0) { // in case no pivot nor searched ever appeared, do not add this constraint!
			support = 1.0 - (double)negativeOccurrences / (double)denominator;
			nuConstraint = new CoExistence(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}

	/**
	 * Precedence(searched, combo): violations are occurrences of
	 * {@code searched} with no combo task appearing before.
	 * Returns {@code null} if {@code searched} never occurs.
	 */
	public Precedence discoverBranchedPrecedenceConstraints(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		Precedence nuConstraint = null;
		if (searchedAppearances < 1)
			return nuConstraint;

		LocalStatsWrapperForCharsets extSearchedLocalStats = (LocalStatsWrapperForCharsets) searchedLocalStats;
		SortedSet<TasksSetCounter> neverBeforeAppearedCharSets = null;
		int negativeOccurrences = 0;
		double support = 0;
		neverBeforeAppearedCharSets =
				extSearchedLocalStats.getNeverMoreAppearedBeforeCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		if (neverBeforeAppearedCharSets.size() == 0) {
			nuConstraint = new Precedence(
					new TaskCharSet(searched),
					comboToAnalyze,
					1.0);
		} else {
			// Cleanup: the support computation and the Precedence construction
			// used to sit INSIDE this loop, building one throw-away object per
			// charset; only the last (with the full accumulated count) survived.
			// Hoisting them out yields the identical final constraint.
			for (TasksSetCounter neverAppearedBeforeCharSet : neverBeforeAppearedCharSets) {
				negativeOccurrences += neverAppearedBeforeCharSet.getCounter();
			}
			support = 1.0 - (double)negativeOccurrences / (double)searchedAppearances;
			nuConstraint = new Precedence(
					new TaskCharSet(searched),
					comboToAnalyze,
					support);
		}
		return nuConstraint;
	}

	/**
	 * RespondedExistence(combo, searched): violations are pivot occurrences in
	 * traces where {@code searched} never appears at all.
	 * Returns {@code null} if no pivot ever occurs.
	 */
	public RespondedExistence discoverBranchedRespondedExistenceConstraints(
			TaskChar searched,
			TaskCharSet comboToAnalyze) {
		RespondedExistence nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).howManyTimesItNeverAppearedAtAll();
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		if (denominator > 0) { // in case no pivot ever appeared, do not add this constraint!
			support = 1.0 - (double) negativeOccurrences / (double) denominator;
			nuConstraint = new RespondedExistence(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}

	/**
	 * Response(combo, searched): violations are pivot occurrences never
	 * followed by {@code searched}. Returns {@code null} if no pivot ever occurs.
	 */
	public Response discoverBranchedResponseConstraints(
			TaskChar searched,
			TaskCharSet comboToAnalyze) {
		Response nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).howManyTimesItNeverAppearedOnwards();
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		if (denominator > 0) { // in case no pivot ever appeared, do not add this constraint!
			support = 1.0 - (double) negativeOccurrences / (double) denominator;
			nuConstraint = new Response(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}

	/**
	 * Succession(combo, searched): combines response violations of the pivots
	 * with the never-appeared-before counters of {@code searched}.
	 * Returns {@code null} if neither side ever occurs.
	 */
	public Succession discoverBranchedSuccessionConstraints(
			TaskChar searched,
			LocalStatsWrapper searchedLocalStats,
			long searchedAppearances,
			TaskCharSet comboToAnalyze) {
		Succession nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper pivotStatsWrapper = null;
		LocalStatsWrapperForCharsets extSearchedLocalStats = (LocalStatsWrapperForCharsets) searchedLocalStats;
		SortedSet<TasksSetCounter> neverAppearedCharSets = null;

		negativeOccurrences = 0;
		denominator = 0;
		for (TaskChar pivot : comboToAnalyze.getTaskCharsArray()) {
			pivotStatsWrapper = globalStats.statsTable.get(pivot);
			negativeOccurrences += pivotStatsWrapper.interplayStatsTable.get(searched.identifier).howManyTimesItNeverAppearedOnwards();
			denominator += pivotStatsWrapper.getTotalAmountOfOccurrences();
		}
		neverAppearedCharSets =
				extSearchedLocalStats.getNeverMoreAppearedBeforeCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		for (TasksSetCounter neverAppearedCharSet : neverAppearedCharSets) {
			negativeOccurrences += neverAppearedCharSet.getCounter();
		}
		denominator += searchedAppearances;
		if (denominator > 0) { // in case no pivot nor searched ever appeared, do not add this constraint!
			support = 1.0 - (double) negativeOccurrences / (double) denominator;
			nuConstraint = new Succession(
					comboToAnalyze,
					new TaskCharSet(searched),
					support);
		}
		return nuConstraint;
	}
}
| 15,149 | 35.071429 | 137 | java |
Janus | Janus-master/src/minerful/miner/engine/ProbabilisticRelationOutBranchedConstraintsMiningEngine.java | package minerful.miner.engine;
import java.util.SortedSet;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.MutualRelationConstraint;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.miner.stats.GlobalStatsTable;
import minerful.miner.stats.LocalStatsWrapper;
import minerful.miner.stats.LocalStatsWrapperForCharsets;
import minerful.miner.stats.charsets.TasksSetCounter;
public class ProbabilisticRelationOutBranchedConstraintsMiningEngine {
private GlobalStatsTable globalStats;
	/**
	 * Creates a miner for out-branched relation constraints.
	 *
	 * @param globalStats knowledge base of per-task and inter-task statistics
	 *        gathered from the log
	 */
	public ProbabilisticRelationOutBranchedConstraintsMiningEngine(
			GlobalStatsTable globalStats) {
		this.globalStats = globalStats;
	}
	/**
	 * Computes a branched AlternatePrecedence(pivot, comboToAnalyze).
	 * Violations are the times the pivot never occurred before a searched
	 * task, plus the pivot-free repetitions of searched tasks in between
	 * ({@code betweenBackwards}).
	 *
	 * NOTE(review): unlike the Succession/Response counterparts there is no
	 * {@code denominator > 0} guard; if no searched task ever occurs, support
	 * becomes 0/0 (NaN) — confirm whether callers filter that case.
	 *
	 * @param pivotTaskCh single pivot task (first operand)
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint with computed support (never {@code null})
	 */
	public AlternatePrecedence discoverBranchedAlternatePrecedenceConstraints(
			TaskChar pivotTaskCh,
			TaskCharSet comboToAnalyze) {
		AlternatePrecedence nuConstraint = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper searchedStatsWrapper = null;
		for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
			searchedStatsWrapper = globalStats.statsTable.get(searched);
			// pivot never occurred before this searched task at all
			negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).howManyTimesItNeverAppearedBackwards();
			// repetitions of searched with no pivot in between (alternation violations)
			negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).betweenBackwards;
			denominator += searchedStatsWrapper.getTotalAmountOfOccurrences();
		}
		support = 1.0 - (double) negativeOccurrences / (double) denominator;
		nuConstraint = new AlternatePrecedence(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
		return nuConstraint;
	}
	/**
	 * Computes a branched AlternateResponse(pivot, comboToAnalyze).
	 * Violations are (i) pivot occurrences after which no searched task ever
	 * occurred again, and (ii) pivot repetitions before a searched task
	 * occurring afterwards (broken alternation).
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param pivotLocalStats statistics of the pivot; must be a
	 *        {@link LocalStatsWrapperForCharsets} instance (cast below)
	 * @param pivotAppearances total number of pivot occurrences
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint, or {@code null} if the pivot never occurred
	 */
	public AlternateResponse discoverBranchedAlternateResponseConstraints(
			TaskChar pivotTaskCh,
			LocalStatsWrapper pivotLocalStats,
			long pivotAppearances,
			TaskCharSet comboToAnalyze) {
		AlternateResponse nuConstraint = null;
		if (pivotAppearances < 1)
			return nuConstraint;
		double support = 0;
		int negativeOccurrences = 0;
		LocalStatsWrapperForCharsets extPivotLocalStats = (LocalStatsWrapperForCharsets) pivotLocalStats;
		// Pivot occurrences never followed by any of the searched tasks
		SortedSet<TasksSetCounter> neverMoreAfterLastCharSets =
				extPivotLocalStats.getNeverMoreAfterLastOccurrenceCharacterSets()
				.selectCharSetCountersSharedAmong(
						comboToAnalyze.getTaskCharsArray()
				);
		// Pivot repetitions before the searched tasks occurring afterwards
		SortedSet<TasksSetCounter> alternationCharSets =
				extPivotLocalStats.getRepetitionsBeforeCharactersAppearingAfter()
				.selectCharSetCountersSharedAmong(
						comboToAnalyze.getTaskCharsArray()
				);
		for (TasksSetCounter neverMoreAfterLastCharSet : neverMoreAfterLastCharSets) {
			negativeOccurrences += neverMoreAfterLastCharSet.getCounter();
		}
		for (TasksSetCounter alternationAfterCharSet : alternationCharSets) {
			negativeOccurrences += alternationAfterCharSet.getCounter();
		}
		support = 1.0 - (double)negativeOccurrences / (double)pivotAppearances;
		nuConstraint = new AlternateResponse(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
		return nuConstraint;
	}
	/**
	 * Computes a branched AlternateSuccession(pivot, comboToAnalyze),
	 * combining forward violations (no searched task after the pivot) with
	 * backward ones (no pivot before a searched task, or pivot-free
	 * repetitions of searched tasks in between).
	 *
	 * NOTE(review): no {@code denominator > 0} guard here, unlike the plain
	 * Succession variants — 0/0 yields a NaN support; confirm callers filter it.
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param pivotAppearances total number of pivot occurrences
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint with computed support (never {@code null})
	 */
	public AlternateSuccession discoverBranchedAlternateSuccessionConstraints(
			TaskChar pivotTaskCh,
			long pivotAppearances,
			TaskCharSet comboToAnalyze) {
		AlternateSuccession nuConstraint = null;
		LocalStatsWrapperForCharsets extPivotLocalStats = (LocalStatsWrapperForCharsets) (globalStats.statsTable.get(pivotTaskCh.identifier));
		SortedSet<TasksSetCounter>
			neverAppearedCharSets = null,
			repetitionsBeforeAppearingAfterCharSets = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper searchedStatsWrapper = null;
//		repetitionsBeforeAppearingAfterCharSets =
//				extPivotLocalStats.repetitionsBeforeCharactersAppearingAfter.selectCharSetCountersSharedAmong(comboToAnalyze.getTaskChars());
		// Forward violations: no searched task ever occurred after the pivot
		neverAppearedCharSets =
				extPivotLocalStats.getNeverMoreAppearedAfterCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
//		for (CharactersSetCounter repetitionsBeforeAppearingAfterCharSet : repetitionsBeforeAppearingAfterCharSets) {
//			negativeOccurrences += repetitionsBeforeAppearingAfterCharSet.getCounter();
//		}
		for (TasksSetCounter neverAppearedCharSet : neverAppearedCharSets) {
			negativeOccurrences += neverAppearedCharSet.getCounter();
		}
		denominator += pivotAppearances;
		// Backward violations: no pivot before searched, or broken alternation
		for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
			searchedStatsWrapper = globalStats.statsTable.get(searched);
			negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).howManyTimesItNeverAppearedBackwards();
			negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).betweenBackwards;
			denominator += searchedStatsWrapper.getTotalAmountOfOccurrences();
		}
		support = 1.0 - (double) negativeOccurrences / (double) denominator;
		nuConstraint = new AlternateSuccession(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
		return nuConstraint;
	}
	/**
	 * Computes a branched ChainPrecedence(pivot, comboToAnalyze): support is
	 * the fraction of searched-task occurrences immediately preceded
	 * (distance -1) by the pivot.
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint with computed support (never {@code null})
	 */
	public ChainPrecedence discoverBranchedChainPrecedenceConstraints(
			TaskChar pivotTaskCh,
			TaskCharSet comboToAnalyze) {
		ChainPrecedence nuConstraint = null;
		int positiveOccurrences = 0, denominator = 0;
		double support = 0;
		LocalStatsWrapper searchedStatsWrapper = null;
		Integer tmpPositiveOccurrencesAdder = null;
		positiveOccurrences = 0;
		for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
			searchedStatsWrapper = globalStats.statsTable.get(searched);
			// distances.get(-1): pivot occurred right before this searched task
			tmpPositiveOccurrencesAdder = searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).distances.get(-1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
			denominator += searchedStatsWrapper.getTotalAmountOfOccurrences();
		}
		support = (double) positiveOccurrences / (double) denominator;
		nuConstraint = new ChainPrecedence(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
		return nuConstraint;
	}
public ChainResponse discoverBranchedChainResponseConstraints(
TaskChar pivotTaskCh,
LocalStatsWrapper pivotLocalStats,
long pivotAppearances,
TaskCharSet comboToAnalyze) {
ChainResponse nuConstraint = null;
if (pivotAppearances < 1)
return nuConstraint;
int positiveOccurrences = 0;
double support = 0;
Integer tmpPositiveOccurrencesAdder = null;
for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
tmpPositiveOccurrencesAdder = pivotLocalStats.interplayStatsTable.get(searched).distances.get(1);
if (tmpPositiveOccurrencesAdder != null)
positiveOccurrences += tmpPositiveOccurrencesAdder;
}
support = (double) positiveOccurrences / (double) pivotAppearances;
nuConstraint = new ChainResponse(
new TaskCharSet(pivotTaskCh),
comboToAnalyze,
support);
return nuConstraint;
}
	/**
	 * Computes a branched ChainSuccession(pivot, comboToAnalyze): support is
	 * the fraction of all pivot and searched occurrences where pivot and a
	 * searched task are directly adjacent (distance 1 onwards, -1 backwards).
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param pivotAppearances total number of pivot occurrences
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint with computed support (never {@code null})
	 */
	public ChainSuccession discoverBranchedChainSuccessionConstraints(
			TaskChar pivotTaskCh,
			long pivotAppearances,
			TaskCharSet comboToAnalyze) {
		ChainSuccession nuConstraint = null;
		int positiveOccurrences = 0,
			denominator = 0;
		double support = 0;
		Integer tmpPositiveOccurrencesAdder = null;
		LocalStatsWrapper
			pivotLocalStats = globalStats.statsTable.get(pivotTaskCh.identifier),
			searchedLocalStats = null;
		denominator = (int) pivotAppearances;
		for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
			// forward: searched right after pivot
			tmpPositiveOccurrencesAdder = pivotLocalStats.interplayStatsTable.get(searched).distances.get(1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
			searchedLocalStats = globalStats.statsTable.get(searched);
			// backward: pivot right before searched
			tmpPositiveOccurrencesAdder = searchedLocalStats.interplayStatsTable.get(pivotTaskCh.identifier).distances.get(-1);
			if (tmpPositiveOccurrencesAdder != null)
				positiveOccurrences += tmpPositiveOccurrencesAdder;
			denominator += searchedLocalStats.getTotalAmountOfOccurrences();
		}
		support = (double) positiveOccurrences / (double) denominator;
		nuConstraint = new ChainSuccession(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
/*
		nuConstraint = new NotChainSuccession(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				Constraint.complementSupport(support));
*/
		return nuConstraint;
	}
	/**
	 * Computes a branched CoExistence(pivot, comboToAnalyze): violations are
	 * the pivot occurrences in traces where no searched task occurred at all,
	 * plus the searched-task occurrences in traces without the pivot.
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param pivotAppearances total number of pivot occurrences
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint with computed support (never {@code null})
	 */
	public MutualRelationConstraint discoverBranchedCoExistenceConstraints(
			TaskChar pivotTaskCh,
			long pivotAppearances,
			TaskCharSet comboToAnalyze) {
		MutualRelationConstraint nuConstraint = null;
		LocalStatsWrapperForCharsets extPivotLocalStats = (LocalStatsWrapperForCharsets) (globalStats.statsTable.get(pivotTaskCh.identifier));
		SortedSet<TasksSetCounter> neverAppearedCharSets = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper searchedStatsWrapper = null;
		// Pivot occurrences with no searched task anywhere in the trace
		neverAppearedCharSets =
				extPivotLocalStats.getNeverAppearedCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		for (TasksSetCounter neverAppearedCharSet : neverAppearedCharSets) {
			negativeOccurrences += neverAppearedCharSet.getCounter();
		}
		denominator += pivotAppearances;
		// Searched-task occurrences with no pivot anywhere in the trace
		for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
			searchedStatsWrapper = globalStats.statsTable.get(searched);
			negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).howManyTimesItNeverAppearedAtAll();
			denominator += searchedStatsWrapper.getTotalAmountOfOccurrences();
		}
		support = 1.0 - (double) negativeOccurrences / (double) denominator;
		nuConstraint = new CoExistence(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
/*
		nuConstraint = new NotCoExistence(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				Constraint.complementSupport(support));
*/
		return nuConstraint;
	}
public Precedence discoverBranchedPrecedenceConstraints(
TaskChar pivotTaskCh,
TaskCharSet comboToAnalyze) {
Precedence nuConstraint = null;
int negativeOccurrences = 0,
denominator = 0;
double support = 0;
LocalStatsWrapper searchedStatsWrapper = null;
negativeOccurrences = 0;
denominator = 0;
for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
searchedStatsWrapper = globalStats.statsTable.get(searched);
negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).howManyTimesItNeverAppearedBackwards();
denominator += searchedStatsWrapper.getTotalAmountOfOccurrences();
}
support = 1.0 - (double) negativeOccurrences / (double) denominator;
nuConstraint = new Precedence(
new TaskCharSet(pivotTaskCh),
comboToAnalyze,
support);
return nuConstraint;
}
	/**
	 * Computes a branched RespondedExistence(pivot, comboToAnalyze):
	 * violations are the pivot occurrences in traces where none of the
	 * searched tasks occurred at all.
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param pivotLocalStats statistics of the pivot; must be a
	 *        {@link LocalStatsWrapperForCharsets} instance (cast below)
	 * @param pivotAppearances total number of pivot occurrences
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint, or {@code null} if the pivot never occurred
	 */
	public RespondedExistence discoverBranchedRespondedExistenceConstraints(
			TaskChar pivotTaskCh,
			LocalStatsWrapper pivotLocalStats,
			long pivotAppearances,
			TaskCharSet comboToAnalyze) {
		RespondedExistence nuConstraint = null;
		if (pivotAppearances < 1)
			return nuConstraint;
		LocalStatsWrapperForCharsets extPivotLocalStats = (LocalStatsWrapperForCharsets) pivotLocalStats;
		SortedSet<TasksSetCounter> neverAppearedCharSets = null;
		int negativeOccurrences = 0;
		double support = 0;
		neverAppearedCharSets =
				extPivotLocalStats.getNeverAppearedCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		if (neverAppearedCharSets.size() == 0) {
			// No violation was ever recorded: full support
			nuConstraint = new RespondedExistence(
					new TaskCharSet(pivotTaskCh),
					comboToAnalyze,
					1.0);
		} else {
			for (TasksSetCounter neverAppearedCharSet : neverAppearedCharSets) {
				negativeOccurrences += neverAppearedCharSet.getCounter();
			}
			support = 1.0 - (double)negativeOccurrences / (double)pivotAppearances;
			nuConstraint = new RespondedExistence(
					new TaskCharSet(pivotTaskCh),
					comboToAnalyze,
					support);
		}
		return nuConstraint;
	}
public Response discoverBranchedResponseConstraints(
TaskChar pivotTaskCh,
LocalStatsWrapper pivotLocalStats,
long pivotAppearances,
TaskCharSet comboToAnalyze) {
Response nuConstraint = null;
if (pivotAppearances < 1)
return nuConstraint;
LocalStatsWrapperForCharsets extPivotLocalStats = (LocalStatsWrapperForCharsets) pivotLocalStats;
SortedSet<TasksSetCounter> neverAppearedCharSets = null;
int negativeOccurrences = 0;
double support = 0;
neverAppearedCharSets =
extPivotLocalStats.getNeverMoreAppearedAfterCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
if (neverAppearedCharSets.size() == 0) {
nuConstraint = new Response(
new TaskCharSet(pivotTaskCh),
comboToAnalyze,
1.0);
} else {
for (TasksSetCounter neverAppearedAfterCharSet : neverAppearedCharSets) {
negativeOccurrences += neverAppearedAfterCharSet.getCounter();
support = 1.0 - (double)negativeOccurrences / (double)pivotAppearances;
nuConstraint = new Response(
new TaskCharSet(pivotTaskCh),
comboToAnalyze,
support);
}
}
return nuConstraint;
}
	/**
	 * Computes a branched Succession(pivot, comboToAnalyze), combining the
	 * forward violations (no searched task after the pivot) with the backward
	 * ones (no pivot before a searched task).
	 *
	 * NOTE(review): no {@code denominator > 0} guard here, unlike the
	 * searched-side overload of this method — 0/0 yields a NaN support;
	 * confirm callers filter it.
	 *
	 * @param pivotTaskCh the pivot task (first operand)
	 * @param pivotAppearances total number of pivot occurrences
	 * @param comboToAnalyze branched set of searched tasks (second operand)
	 * @return the constraint with computed support (never {@code null})
	 */
	public Succession discoverBranchedSuccessionConstraints(
			TaskChar pivotTaskCh,
			long pivotAppearances,
			TaskCharSet comboToAnalyze) {
		Succession nuConstraint = null;
		LocalStatsWrapperForCharsets extPivotLocalStats = (LocalStatsWrapperForCharsets) (globalStats.statsTable.get(pivotTaskCh.identifier));
		SortedSet<TasksSetCounter> neverAppearedCharSets = null;
		int negativeOccurrences = 0,
			denominator = 0;
		double support = 0;
		LocalStatsWrapper searchedStatsWrapper = null;
		// Forward violations: no searched task ever occurred after the pivot
		neverAppearedCharSets =
				extPivotLocalStats.getNeverMoreAppearedAfterCharacterSets().selectCharSetCountersSharedAmong(comboToAnalyze.getTaskCharsArray());
		for (TasksSetCounter neverAppearedCharSet : neverAppearedCharSets) {
			negativeOccurrences += neverAppearedCharSet.getCounter();
		}
		denominator += pivotAppearances;
		// Backward violations: no pivot ever occurred before the searched task
		for (TaskChar searched : comboToAnalyze.getTaskCharsArray()) {
			searchedStatsWrapper = globalStats.statsTable.get(searched);
			negativeOccurrences += searchedStatsWrapper.interplayStatsTable.get(pivotTaskCh.identifier).howManyTimesItNeverAppearedBackwards();
//			negativeOccurrences += searchedStatsWrapper.localStatsTable.get(pivot).betweenBackwards;
			denominator += searchedStatsWrapper.getTotalAmountOfOccurrences();
		}
		support = 1.0 - (double) negativeOccurrences / (double) denominator;
		nuConstraint = new Succession(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				support);
/*
		nuConstraint = new NotSuccession(
				new TaskCharSet(pivotTaskCh),
				comboToAnalyze,
				Constraint.complementSupport(support));
*/
		return nuConstraint;
	}
} | 15,376 | 35.524941 | 137 | java |
Janus | Janus-master/src/minerful/miner/params/MinerFulCmdParameters.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner.params;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class MinerFulCmdParameters extends ParamsManager {
public enum CONSTRAINT_KINDS {
EXISTENCE, RELATION,
FORWARD, BACKWARD, MUTUAL,
DIRECTIONED, UNDIRECTIONED,
POSITIVE, NEGATIVE
}
public static final String STATS_OUT_PATH_PARAM_NAME = "statsXML";
public static final String OUT_BRANCHING_LIMIT_PARAM_NAME = "b";
public static final String FORESEE_DISTANCES_PARAM_NAME = "withDist";
public static final String SHOW_MEMSPACE_USED_PARAM_NAME = "showMem";
public static final String EXCLUDED_FROM_RESULTS_SPEC_FILE_PATH_PARAM_NAME = "exclTasks";
public static final String KB_PARALLEL_COMPUTATION_THREADS_PARAM_NAME = "para";
public static final String QUERY_PARALLEL_COMPUTATION_THREADS_PARAM_NAME = "paraQ";
// public static final String TIME_ANALYSIS_PARAM_NAME = "time";
public static final Integer MINIMUM_BRANCHING_LIMIT = 1;
public static final Integer DEFAULT_OUT_BRANCHING_LIMIT = MINIMUM_BRANCHING_LIMIT;
public static final Integer MINIMUM_PARALLEL_EXECUTION_THREADS = 1;
/** Out-branching maximum level for discovered constraints (must be greater than or equal to {@link #MINIMUM_BRANCHING_LIMIT MINIMUM_BRANCHING_LIMIT}, the default) */
public Integer branchingLimit;
/** Output file where log statistics are printed out */
public File statsOutputFile;
/** Ignore this */
public Boolean foreseeDistances;
/** Ignore this */
public Boolean memSpaceShowingRequested;
/** Collection of task names to exclude from the discovery */
public Collection<String> activitiesToExcludeFromResult;
/** Number of parallel threads to use while running the knowledge-base discovery phase of the algorithm (must be greater than or equal to {@link #MINIMUM_PARALLEL_EXECUTION_THREADS MINIMUM_PARALLEL_EXECUTION_THREADS}, the default) */
public Integer kbParallelProcessingThreads;
/** Number of parallel threads to use while running the knowledge-base discovery phase of the algorithm (must be greater than or equal to {@link #MINIMUM_PARALLEL_EXECUTION_THREADS MINIMUM_PARALLEL_EXECUTION_THREADS}, the default) */
public Integer queryParallelProcessingThreads;
	/** Creates a parameter holder initialised with all default values. */
	public MinerFulCmdParameters() {
		super();
		this.branchingLimit = DEFAULT_OUT_BRANCHING_LIMIT;
		this.foreseeDistances = false;
		this.memSpaceShowingRequested = false;
		this.kbParallelProcessingThreads = MINIMUM_PARALLEL_EXECUTION_THREADS;
		this.queryParallelProcessingThreads = MINIMUM_PARALLEL_EXECUTION_THREADS;
//		this.takeTime = false;
	}
	/**
	 * Creates a parameter holder populated from the given command-line
	 * arguments, parsed against the given (extended) option set.
	 */
	public MinerFulCmdParameters(Options options, String[] args) {
		this();
		// parse the command line arguments
		this.parseAndSetup(options, args);
	}
	/**
	 * Creates a parameter holder populated from the given command-line
	 * arguments, parsed against this class' own option set only.
	 */
	public MinerFulCmdParameters(String[] args) {
		this();
		// parse the command line arguments
		this.parseAndSetup(new Options(), args);
	}
	/**
	 * Reads the parsed command line into this object's fields, validating the
	 * branching limit and the two parallelism degrees against their minima,
	 * and optionally loading the list of tasks to exclude from a file.
	 *
	 * @param line the parsed command line
	 * @throws IllegalArgumentException if a numeric option is below its
	 *         minimum, or the exclusion-list file cannot be read
	 */
	@Override
	protected void setup(CommandLine line) {
		this.branchingLimit = Integer.valueOf(line.getOptionValue(
				OUT_BRANCHING_LIMIT_PARAM_NAME,
				this.branchingLimit.toString()
				)
				);
		if (this.branchingLimit < MINIMUM_BRANCHING_LIMIT) {
			throw new IllegalArgumentException(
					"Invalid value for " + OUT_BRANCHING_LIMIT_PARAM_NAME + " option" +
					" (must be equal to or greater than " + (MINIMUM_BRANCHING_LIMIT) + ")");
		}
		this.kbParallelProcessingThreads = Integer.valueOf(line.getOptionValue(
				KB_PARALLEL_COMPUTATION_THREADS_PARAM_NAME,
				kbParallelProcessingThreads.toString()
				)
				);
		this.queryParallelProcessingThreads = Integer.valueOf(line.getOptionValue(
				QUERY_PARALLEL_COMPUTATION_THREADS_PARAM_NAME,
				queryParallelProcessingThreads.toString()
				)
				);
		if (this.kbParallelProcessingThreads < MINIMUM_PARALLEL_EXECUTION_THREADS) {
			throw new IllegalArgumentException(
					"Invalid value for " + KB_PARALLEL_COMPUTATION_THREADS_PARAM_NAME + " option" +
					" (must be equal to or greater than " + (MINIMUM_PARALLEL_EXECUTION_THREADS) + ")");
		}
		if (this.queryParallelProcessingThreads < MINIMUM_PARALLEL_EXECUTION_THREADS) {
			throw new IllegalArgumentException(
					"Invalid value for " + QUERY_PARALLEL_COMPUTATION_THREADS_PARAM_NAME + " option" +
					" (must be equal to or greater than " + (MINIMUM_PARALLEL_EXECUTION_THREADS) + ")");
		}
		// Flag-type options: presence alone toggles them on
		this.foreseeDistances = line.hasOption(FORESEE_DISTANCES_PARAM_NAME);
		this.memSpaceShowingRequested = line.hasOption(SHOW_MEMSPACE_USED_PARAM_NAME);
//		this.takeTime = line.hasOption(TIME_ANALYSIS_PARAM);
		this.statsOutputFile = openOutputFile(line, STATS_OUT_PATH_PARAM_NAME);
		// Optional file listing one task name per line to exclude from results
		File listOfExcludedOnesFromResultsFile = openInputFile(line, EXCLUDED_FROM_RESULTS_SPEC_FILE_PATH_PARAM_NAME);
		if (listOfExcludedOnesFromResultsFile != null) {
			try {
				BufferedReader buRo = new BufferedReader(new FileReader(listOfExcludedOnesFromResultsFile));
				String excluActi = buRo.readLine();
				this.activitiesToExcludeFromResult = new ArrayList<String>();
				while (excluActi != null) {
					this.activitiesToExcludeFromResult.add(excluActi);
					excluActi = buRo.readLine();
				}
				buRo.close();
			} catch (IOException e) {
				throw new IllegalArgumentException("Unreadable file: " + line.getOptionValue(EXCLUDED_FROM_RESULTS_SPEC_FILE_PATH_PARAM_NAME));
			}
		}
	}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
	/** @return the options this parameter manager can parse */
	@Override
	public Options listParseableOptions() {
		return parseableOptions();
	}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(STATS_OUT_PATH_PARAM_NAME)
.hasArg().argName("path")
.longOpt("stats-XML-out")
.desc("path of the file in which the statistics kept in the MINERful knowledge base (say, that task A occurs but B does not for N times, etc.) should be saved; the file is stored in an XML format")
.type(String.class)
.build()
);
options.addOption(
Option.builder(EXCLUDED_FROM_RESULTS_SPEC_FILE_PATH_PARAM_NAME)
.hasArg().argName("path")
.longOpt("exclude-results-in")
.desc("path of the file where the tasks to exclude from the result are listed")
.type(String.class)
.build()
);
options.addOption(
Option.builder(OUT_BRANCHING_LIMIT_PARAM_NAME)
.hasArg().argName("number")
.longOpt("out-branch")
.desc("out-branching maximum level for discovered constraints (must be greater than or equal to "
+ (MINIMUM_BRANCHING_LIMIT)
+ ")"
+ printDefault(DEFAULT_OUT_BRANCHING_LIMIT))
.type(String.class)
.build()
);
options.addOption(
Option.builder(KB_PARALLEL_COMPUTATION_THREADS_PARAM_NAME)
.hasArg().argName("number")
.longOpt("kb-ll-threads")
.desc("threads for log-processing parallel execution (must be greater than or equal to "
+ (MINIMUM_PARALLEL_EXECUTION_THREADS)
+ ")"
+ printDefault(MINIMUM_PARALLEL_EXECUTION_THREADS))
.type(String.class)
.build()
);
options.addOption(
Option.builder(QUERY_PARALLEL_COMPUTATION_THREADS_PARAM_NAME)
.hasArg().argName("number")
.longOpt("q-ll-threads")
.desc("threads for querying parallel execution of the knowledge base (must be greater than or equal to "
+ (MINIMUM_PARALLEL_EXECUTION_THREADS)
+ ")"
+ printDefault(MINIMUM_PARALLEL_EXECUTION_THREADS))
.type(String.class)
.build()
);
options.addOption(
Option.builder(FORESEE_DISTANCES_PARAM_NAME)
.longOpt("foresee-distances")
.desc(
attachInstabilityWarningToDescription("compute the foreseen confidence interval for the expected distance between tasks in relation constraints")
)
.build()
);
// options.addOption(
// Option.builder(TIME_ANALYSIS_PARAM)
// .longOpt("time-aware")
// .desc(
// attachInstabilityWarningToDescription("include the analysis of event timestamps into discovery")
// )
// );
options.addOption(
Option.builder(SHOW_MEMSPACE_USED_PARAM_NAME)
.longOpt("show-mem-peak")
.desc("show the memory consumption peak (could slow down the overall computation)")
.build()
);
options.addOption(
Option.builder(QUERY_PARALLEL_COMPUTATION_THREADS_PARAM_NAME)
.hasArg().argName("number")
.longOpt("q-ll-threads")
.desc("threads for querying parallel execution of the knowledge base (must be greater than or equal to "
+ (MINIMUM_PARALLEL_EXECUTION_THREADS)
+ ")"
+ printDefault(MINIMUM_PARALLEL_EXECUTION_THREADS))
.type(String.class)
.build()
);
return options;
}
	/** @return true if the out-branching limit exceeds the minimum, i.e., branched constraints are requested */
	public boolean isBranchingRequired() {
		return this.branchingLimit > MINIMUM_BRANCHING_LIMIT;
	}
	/** @return true if more than one thread is requested for querying the knowledge base */
	public boolean isParallelQueryProcessingRequired() {
		return this.queryParallelProcessingThreads > MinerFulCmdParameters.MINIMUM_PARALLEL_EXECUTION_THREADS;
	}
	/** @return true if more than one thread is requested for building the knowledge base */
	public boolean isParallelKbComputationRequired() {
		return this.kbParallelProcessingThreads > MinerFulCmdParameters.MINIMUM_PARALLEL_EXECUTION_THREADS;
	}
} | 9,852 | 39.381148 | 238 | java |
Janus | Janus-master/src/minerful/miner/stats/GlobalStatsTable.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner.stats;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.log4j.Logger;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.miner.stats.xmlenc.GlobalStatsMapAdapter;
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class GlobalStatsTable {
@XmlTransient
protected static Logger logger = Logger.getLogger(GlobalStatsTable.class);
@XmlElement
@XmlJavaTypeAdapter(value=GlobalStatsMapAdapter.class)
public Map<TaskChar, LocalStatsWrapper> statsTable;
@XmlTransient
public final TaskCharArchive taskCharArchive;
@XmlAttribute
public long logSize;
@XmlAttribute
public final Integer maximumBranchingFactor;
	/** No-arg constructor, presumably for JAXB unmarshalling (the class is @XmlRootElement) — TODO confirm. */
	private GlobalStatsTable() {
		this.maximumBranchingFactor = null;
		this.taskCharArchive = new TaskCharArchive();
	}
	/**
	 * Creates a statistics table over the given task alphabet.
	 *
	 * @param taskCharArchive the task alphabet
	 * @param testbedDimension number of traces in the log
	 * @param maximumBranchingFactor out-branching limit; {@code null} or 1
	 *        disables charset-aware (branched) statistics
	 */
	public GlobalStatsTable(TaskCharArchive taskCharArchive, long testbedDimension, Integer maximumBranchingFactor) {
		this.taskCharArchive = taskCharArchive;
		this.logSize = testbedDimension;
		this.maximumBranchingFactor = maximumBranchingFactor;
		this.initGlobalStatsTable();
	}
	/** Convenience constructor: empty log, no branching. */
	public GlobalStatsTable(TaskCharArchive taskCharArchive) {
		this(taskCharArchive, 0, null);
	}
	/** Convenience constructor: empty log, with the given branching factor. */
	public GlobalStatsTable(TaskCharArchive taskCharArchive, Integer maximumBranchingFactor) {
		this(taskCharArchive, 0, maximumBranchingFactor);
	}
private void initGlobalStatsTable() {
this.statsTable = new HashMap<TaskChar, LocalStatsWrapper>(this.taskCharArchive.getTaskChars().size(), (float)1.0);
Set<TaskChar> alphabet = this.taskCharArchive.getTaskChars();
if (this.isForBranchedConstraints()) {
for (TaskChar task: this.taskCharArchive.getTaskChars()) {
this.statsTable.put(task, new LocalStatsWrapperForCharsetsWAlternation(taskCharArchive, task, maximumBranchingFactor));
}
} else {
for (TaskChar task: alphabet) {
this.statsTable.put(task, new LocalStatsWrapper(taskCharArchive, task));
}
}
}
	/** @return true if a branching factor greater than 1 was set, i.e., charset-aware statistics are kept */
	public boolean isForBranchedConstraints() {
		return maximumBranchingFactor != null && maximumBranchingFactor > 1;
	}
	/**
	 * Renders, for every task, its occurrence histogram (pairs of
	 * repetitions-per-trace and trace count), total occurrences, first/last
	 * positions, and the nested per-task statistics.
	 */
	@Override
	public String toString() {
		StringBuilder sBuf = new StringBuilder();
		for(TaskChar key: this.statsTable.keySet()) {
			StringBuilder aggregateAppearancesBuffer = new StringBuilder();
			LocalStatsWrapper statsWrapper = this.statsTable.get(key);
			if (statsWrapper.repetitions != null) {
				// <n, m>: the task occurred n times per trace, in m traces
				for (Integer counter: statsWrapper.repetitions.keySet()) {
					aggregateAppearancesBuffer.append(", <");
					aggregateAppearancesBuffer.append(counter);
					aggregateAppearancesBuffer.append(", ");
					aggregateAppearancesBuffer.append(statsWrapper.repetitions.get(counter));
					aggregateAppearancesBuffer.append(">");
				}
			}
			sBuf.append(
					"\t[" + key + "\n"
					+ "\t aggregate occurrences = {"
					// substring(2) strips the leading ", " separator
					+ (aggregateAppearancesBuffer.length() > 0 ? aggregateAppearancesBuffer.substring(2) : "")
					+ "}, for a total amount of "
					+ statsWrapper.getTotalAmountOfOccurrences()
					+ " time(/s)\n");
			sBuf.append("\t as the first for " + statsWrapper.getAppearancesAsFirst() + ",");
			sBuf.append(" as the last for " + statsWrapper.occurrencesAsLast + " time(/s)");
			sBuf.append("\t]\n");
			sBuf.append(statsWrapper.toString());
		}
		return sBuf.toString();
	}
	/**
	 * Adds the other table's statistics into this one: shared tasks are merged
	 * wrapper-by-wrapper; tasks only known to the other table are adopted
	 * as-is (the other table's wrappers are referenced, not copied).
	 *
	 * @param other the statistics table to fold into this one
	 */
	public void mergeAdditively(GlobalStatsTable other) {
		this.logSize += other.logSize;
		for (TaskChar key : this.statsTable.keySet()) {
			if (other.statsTable.containsKey(key)) {
				logger.trace("Additively merging the statistics tables of " + key);
				this.statsTable.get(key).mergeAdditively(other.statsTable.get(key));
			}
		}
		for (TaskChar key : other.statsTable.keySet()) {
			if (!this.statsTable.containsKey(key)) {
				logger.trace("Additively merging the statistics tables of " + key);
				this.statsTable.put(key, other.statsTable.get(key));
			}
		}
	}
	/**
	 * Subtracts the other table's statistics from this one for shared tasks;
	 * tasks present only in the other table cannot be subtracted and are
	 * reported with a warning.
	 *
	 * @param other the statistics table to subtract from this one
	 */
	public void mergeSubtractively(GlobalStatsTable other) {
		this.logSize -= other.logSize;
		for (TaskChar key : this.statsTable.keySet()) {
			if (other.statsTable.containsKey(key)) {
				logger.trace("Subtractively merging the statistics tables of " + key);
				this.statsTable.get(key).mergeSubtractively(other.statsTable.get(key));
			}
		}
		for (TaskChar key : other.statsTable.keySet()) {
			if (!this.statsTable.containsKey(key)) {
				logger.warn("Trying to merge subtractively a part of the stats table that was not included for " + key);
			}
		}
	}
} | 5,317 | 35.930556 | 129 | java |
Janus | Janus-master/src/minerful/miner/stats/LocalStatsWrapper.java | package minerful.miner.stats;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.log4j.Logger;
import minerful.concept.Event;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.miner.stats.xmlenc.LocalStatsMapAdapter;
import minerful.miner.stats.xmlenc.RepetitionsMapAdapter;
@XmlType
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class LocalStatsWrapper {
@XmlTransient
public static final int FIRST_POSITION_IN_TRACE = 1;
@XmlTransient
protected static Logger logger = Logger.getLogger(LocalStatsWrapper.class);
// TODO Do not consider this a constant, but rather a user-definable
// parameter
/*
* It affects the perspective of the distances computing: either
* event-centric (i.e., every new occurrence of A lets distances calculation
* restart for A w.r.t. the remaining characters) or not (all distances are
* summed up)
*/
public static final boolean EVENT_CENTRIC = false;
@XmlTransient
	/**
	 * Small state machine tracking alternations between the base task and one
	 * other task: {@link #charge()} is called on each base-task occurrence
	 * (the first arms the switcher, later ones count repetitions),
	 * {@link #flush()} is called when the other task occurs and returns the
	 * repetitions accumulated since arming, resetting the counter.
	 * Fields are read and mutated directly by the enclosing class.
	 */
	protected class AlternatingCounterSwitcher {
		// true once the base task has occurred and no flush happened yet
		public boolean alternating = false;
		// base-task repetitions accumulated while armed
		public Integer counter = 0;
		// number of completed alternations (i.e., flushes while armed)
		public Integer alternationsCounter = 0;
		public AlternatingCounterSwitcher() {
		}
		public AlternatingCounterSwitcher(Integer counter) {
			this();
			this.counter = counter;
		}
		/** Returns this switcher to its pristine state. */
		public void reset() {
			this.alternating = false;
			this.counter = 0;
			this.alternationsCounter = 0;
		}
		/**
		 * If armed: records one completed alternation, disarms, and returns
		 * the accumulated repetition count; otherwise returns 0.
		 */
		public int flush() {
			int counter = this.counter;
			if (alternating) {
				this.alternating = false;
				this.alternationsCounter++;
				this.counter = 0;
				return counter;
			}
			return 0;
		}
		/** Arms the switcher on first call; counts a repetition on later calls. */
		public void charge() {
			if (!this.alternating) {
				this.alternating = true;
			} else {
				this.counter++;
			}
		}
		@Override
		public String toString() {
			return "AlternatingCounterSwitcher{" + "alternating="
					+ this.alternating + ", counter=" + this.counter
					+ ", altrn's-counter=" + this.alternationsCounter + '}';
		}
	}
@XmlTransient
protected TaskChar baseTask;
@XmlTransient
protected TaskCharArchive archive;
@XmlTransient
protected Integer firstOccurrenceAtThisStep;
@XmlTransient
protected SortedSet<Integer> repetitionsAtThisStep;
@XmlElement
@XmlJavaTypeAdapter(value = RepetitionsMapAdapter.class)
public Map<Integer, Integer> repetitions;
@XmlElement(name = "interplayStats")
@XmlJavaTypeAdapter(value = LocalStatsMapAdapter.class)
public Map<TaskChar, StatsCell> interplayStatsTable;
@XmlTransient
protected Map<TaskChar, Integer> neverMoreAppearancesAtThisStep;
@XmlTransient
protected Map<TaskChar, AlternatingCounterSwitcher> alternatingCntSwAtThisStep;
@XmlAttribute
public int occurencesAsFirst;
@XmlAttribute
public int occurrencesAsLast;
@XmlAttribute
protected long totalAmountOfOccurrences;
	/** No-arg constructor, presumably for JAXB unmarshalling (the class is @XmlRootElement) — TODO confirm. */
	protected LocalStatsWrapper() {
	}
	/**
	 * Creates an empty statistics wrapper for {@code baseTask}, with one
	 * interplay-statistics cell per task of the alphabet.
	 *
	 * @param archive the task alphabet
	 * @param baseTask the task these local statistics refer to
	 */
	public LocalStatsWrapper(TaskCharArchive archive, TaskChar baseTask) {
		this();
		this.baseTask = baseTask;
		this.archive = archive;
		this.initLocalStatsTable(archive.getTaskChars());
		this.repetitions = new TreeMap<Integer, Integer>();
		this.totalAmountOfOccurrences = 0;
		this.occurencesAsFirst = 0;
		this.occurrencesAsLast = 0;
	}
	/**
	 * Allocates one interplay-statistics cell per task; for every task other
	 * than the base one, also sets up a "never appeared again" counter and an
	 * alternation switcher.
	 *
	 * @param alphabet the full task alphabet
	 */
	protected void initLocalStatsTable(Set<TaskChar> alphabet) {
		this.interplayStatsTable = new HashMap<TaskChar, StatsCell>(alphabet.size(), (float)1.0);
		this.neverMoreAppearancesAtThisStep = new HashMap<TaskChar, Integer>(alphabet.size(), (float)1.0);
		this.alternatingCntSwAtThisStep = new HashMap<TaskChar, AlternatingCounterSwitcher>(alphabet.size(), (float)1.0);
		for (TaskChar task : alphabet) {
			this.interplayStatsTable.put(task, new StatsCell());
			// A task needs no interplay bookkeeping with itself
			if (!task.equals(this.baseTask)) {
				this.neverMoreAppearancesAtThisStep.put(task, 0);
				this.alternatingCntSwAtThisStep.put(task,
						new AlternatingCounterSwitcher());
			}
		}
	}
    /**
     * Registers the event read at the given trace position against the pivot task.
     * If the event is the pivot itself, it updates the per-trace occurrence registers
     * and charges the alternation switchers; otherwise, it records a distance between
     * the pivot's occurrences and this event.
     *
     * @param event the event just read from the trace
     * @param position the 1-based position in the trace (negative when reading backwards — see the caller)
     * @param onwards true if the trace is being read left-to-right, false if backwards
     */
    void newAtPosition(Event event, int position, boolean onwards) {
        // Events not in the archive are ignored altogether
        if (this.archive.containsTaskCharByEvent(event)) {
            TaskChar tCh = this.archive.getTaskCharByEvent(event);
            /* if the appeared character is equal to this */
            if (tCh.equals(this.baseTask)) {
                // One more pivot occurrence after which the other tasks have (so far) not been seen
                for (TaskChar otherTCh : this.neverMoreAppearancesAtThisStep.keySet()) {
                    this.neverMoreAppearancesAtThisStep.put(otherTCh,
                            this.neverMoreAppearancesAtThisStep.get(otherTCh) + 1);
                }
                /* if this is the first occurrence in the step, record it */
                if (this.firstOccurrenceAtThisStep == null) {
                    this.firstOccurrenceAtThisStep = position;
                } else {
                    /*
                     * if this is not the first time this chr appears in the step,
                     * initialize the repetitions register
                     */
                    if (repetitionsAtThisStep == null) {
                        repetitionsAtThisStep = new TreeSet<Integer>();
                    }
                }
                /*
                 * record the alternation, i.e., the repetition of the chr itself
                 * between its first appearance and the following different
                 * character
                 */
                for (AlternatingCounterSwitcher sw : this.alternatingCntSwAtThisStep
                        .values()) {
                    sw.charge();
                }
            }
            /* if the appeared character is NOT equal to this */
            else {
                AlternatingCounterSwitcher myAltCountSwitcher = this.alternatingCntSwAtThisStep.get(tCh);
                StatsCell statsCell = this.interplayStatsTable.get(tCh);
                /* store the info that chr appears after the pivot */
                this.neverMoreAppearancesAtThisStep.put(tCh, 0);
                /* is this reading analysis onwards? */
                if (onwards) {// onwards?
                    /* If there has been an alternation, record it! */
                    // TODO In the next future
                    // if (myAltCountSwitcher.alternating)
                    // statsCell.alternatedOnwards++;
                    /*
                     * Record the repetitions in-between (reading the string from
                     * left to right, i.e., onwards) and restart the counter
                     */
                    statsCell.betweenOnwards += myAltCountSwitcher.flush();
                } else {
                    /* If there has been an alternation, record it! */
                    // TODO In the next future
                    // if (myAltCountSwitcher.alternating)
                    // statsCell.alternatedBackwards++;
                    /*
                     * otherwise, record the repetitions in-between (reading the
                     * string from left to right, i.e., backwards) and restart the
                     * counter
                     */
                    statsCell.betweenBackwards += myAltCountSwitcher.flush();
                }
            }
            if (repetitionsAtThisStep != null) {
                /*
                 * for each repetition of the same character during the analysis,
                 * record not only the info of the appearance at a distance equal to
                 * (chr.position - firstOccurrenceInStep.position), but also at the
                 * (chr.position - otherOccurrenceInStep.position) for each other
                 * appearance of the pivot!
                 */
                /* THIS IS THE VERY BIG TRICK TO AVOID ANY TRANSITIVE CLOSURE!! */
                for (Integer occurredAlsoAt : repetitionsAtThisStep) {
                    this.interplayStatsTable.get(tCh).newAtDistance(position - occurredAlsoAt);
                }
            }
            /*
             * If this is not the first occurrence position, record the distance
             * equal to (chr.position - firstOccurrenceInStep.position)
             */
            if (firstOccurrenceAtThisStep != position) {
                /*
                 * START OF: event-centred analysis modification Comment this line
                 * to get back to previous version
                 */
                // NOTE(review): EVENT_CENTRIC is declared outside this excerpt; presumably a compile-time analysis switch
                if (EVENT_CENTRIC) {
                    // Event-centric mode: count the distance from the first occurrence only if the pivot did not repeat
                    if (repetitionsAtThisStep == null || repetitionsAtThisStep.size() < 1) {
                        this.interplayStatsTable.get(tCh).newAtDistance(
                                position - firstOccurrenceAtThisStep);
                    }
                } else {
                    /*
                     * END OF: event-centred analysis modification
                     */
                    this.interplayStatsTable.get(tCh).newAtDistance(
                            position - firstOccurrenceAtThisStep);
                }
            }
            /*
             * If this is the repetition of the pivot, record it (it is needed for
             * the computation of all the other distances!)
             */
            if (this.repetitionsAtThisStep != null
                    && tCh.equals(this.baseTask)) {
                /*
                 * START OF: event-centred analysis modification Comment these lines
                 * to get back to previous version
                 */
                if (EVENT_CENTRIC) {
                    // Event-centric mode keeps only the latest pivot occurrence
                    this.repetitionsAtThisStep.clear();
                }
                /*
                 * END OF: event-centred analysis modification
                 */
                this.repetitionsAtThisStep.add(position);
            }
            // if (baseCharacter.equals("f")) {System.out.print("Seen " + character
            // + " by "); System.out.print(baseCharacter + "\t"); for (String chr:
            // neverMoreAppearancesInStep.keySet()) System.out.print(", " + chr +
            // ": " + neverMoreAppearancesInStep.get(chr)); System.out.print("\n");}
        }
    }
protected void setAsNeverAppeared(TaskChar neverAppearedTask) {
if (!neverAppearedTask.equals(this.baseTask)) {
this.interplayStatsTable
.get(neverAppearedTask)
.setAsNeverAppeared(
((this.repetitionsAtThisStep == null || this.repetitionsAtThisStep
.size() < 1) ? 1
: this.repetitionsAtThisStep.size() + 1));
}
}
protected void setAsNeverAppeared(Set<TaskChar> neverAppearedStuff) {
for (TaskChar chr : neverAppearedStuff) {
this.setAsNeverAppeared(chr);
}
}
void finalizeAnalysisStep(boolean onwards, boolean secondPass) {
/*
* Record the amount of occurrences AT THIS STEP and the total amount
* OVER ALL OF THE STEPS
*/
if (!secondPass) {
this.updateAppearancesCounter();
}
if (this.firstOccurrenceAtThisStep != null) {
/* Record what did not appear in the step, afterwards or backwards */
this.recordCharactersThatNeverAppearedAnymoreInStep(onwards);
/* Does NOTHING, at this stage of the implementation */
for (StatsCell cell : this.interplayStatsTable.values()) {
cell.finalizeAnalysisStep(onwards, secondPass);
}
/* Resets the switchers for the alternations counter */
for (AlternatingCounterSwitcher sw : this.alternatingCntSwAtThisStep.values()) {
sw.reset();
}
/* Resets the local stats table counters */
this.firstOccurrenceAtThisStep = null;
this.repetitionsAtThisStep = null;
}
}
protected void recordCharactersThatNeverAppearedAnymoreInStep(
boolean onwards) {
/* For each character, appeared or not in the step */
for (TaskChar tChNoMore : this.neverMoreAppearancesAtThisStep.keySet()) {
/* If it appeared no more */
if (this.neverMoreAppearancesAtThisStep.get(tChNoMore) > 0) {
/* Set it appeared no more */
this.interplayStatsTable.get(tChNoMore).setAsNeverAppearedAnyMore(
this.neverMoreAppearancesAtThisStep.get(tChNoMore),
onwards);
/* Reset the counter! */
this.neverMoreAppearancesAtThisStep.put(tChNoMore, 0);
}
}
if (this.firstOccurrenceAtThisStep != null
&& (this.repetitionsAtThisStep == null || this.repetitionsAtThisStep
.size() == 0)) {
this.interplayStatsTable.get(this.baseTask)
.setAsNeverAppearedAnyMore(1, onwards);
}
}
/**
* Increments (if needed) the appearances as this character as the first,
* records the amount of occurrences AT THIS STEP and increments the total
* amount OVER ALL OF THE STEPS
*/
protected void updateAppearancesCounter() {
Integer numberOfRepetitions = 0;
if (this.firstOccurrenceAtThisStep != null) {
/* Record the amount of appearances at this step */
numberOfRepetitions = this.repetitionsAtThisStep == null ? 1
: this.repetitionsAtThisStep.size() + 1;
/*
* Increment (if needed) the appearances as this character as the
* first
*/
if (this.firstOccurrenceAtThisStep == FIRST_POSITION_IN_TRACE) {
this.occurencesAsFirst++;
}
}
/*
* Increment the amount of appearances counter with data gathered at
* this step
*/
Integer oldNumberOfRepetitionsInFrequencyTable = this.repetitions
.get(numberOfRepetitions);
this.repetitions.put(numberOfRepetitions,
oldNumberOfRepetitionsInFrequencyTable == null ? 1
: 1 + oldNumberOfRepetitionsInFrequencyTable);
/* Increment the total amount of appearances */
this.totalAmountOfOccurrences += numberOfRepetitions;
}
    /** @return the total number of pivot occurrences over the whole analysed log */
    public long getTotalAmountOfOccurrences() {
        return this.totalAmountOfOccurrences;
    }
    /** @return the number of traces in which the pivot occurs at the first position */
    public int getAppearancesAsFirst() {
        return this.occurencesAsFirst;
    }
    /** @return the number of traces in which the pivot occurs at the last position */
    public int getAppearancesAsLast() {
        return this.occurrencesAsLast;
    }
@Override
public String toString() {
if (this.totalAmountOfOccurrences == 0)
return "";
StringBuilder sBuf = new StringBuilder();
for (TaskChar key : this.interplayStatsTable.keySet()) {
sBuf.append("\t\t[" + key + "] => "
+ this.interplayStatsTable.get(key).toString());
}
return sBuf.toString();
}
public void mergeAdditively(LocalStatsWrapper other) {
this.occurencesAsFirst += other.occurencesAsFirst;
this.occurrencesAsLast += other.occurrencesAsLast;
this.totalAmountOfOccurrences += other.totalAmountOfOccurrences;
for (Integer numOfReps : this.repetitions.keySet()) {
if (other.repetitions.containsKey(numOfReps)) {
this.repetitions.put(numOfReps, this.repetitions.get(numOfReps) + other.repetitions.get(numOfReps));
}
}
for (Integer numOfReps : other.repetitions.keySet()) {
if (!this.repetitions.containsKey(numOfReps)) {
this.repetitions.put(numOfReps, other.repetitions.get(numOfReps));
}
}
for (TaskChar key : this.interplayStatsTable.keySet()) {
if (other.interplayStatsTable.containsKey(key)) {
this.interplayStatsTable.get(key).mergeAdditively(other.interplayStatsTable.get(key));
}
}
for (TaskChar key : other.interplayStatsTable.keySet()) {
if (!this.interplayStatsTable.containsKey(key)) {
this.interplayStatsTable.put(key, other.interplayStatsTable.get(key));
}
}
/*
for (Integer firstOcc : this.firstOccurrences.keySet()) {
if (other.firstOccurrences.containsKey(firstOcc)) {
this.firstOccurrences.put(firstOcc, this.firstOccurrences.get(firstOcc) + other.firstOccurrences.get(firstOcc));
}
}
for (Integer firstOcc : other.firstOccurrences.keySet()) {
if (!this.firstOccurrences.containsKey(firstOcc)) {
this.firstOccurrences.put(firstOcc, other.firstOccurrences.get(firstOcc));
}
}
*/
}
public void mergeSubtractively(LocalStatsWrapper other) {
this.occurencesAsFirst -= other.occurencesAsFirst;
this.occurrencesAsLast -= other.occurrencesAsLast;
this.totalAmountOfOccurrences -= other.totalAmountOfOccurrences;
for (Integer numOfReps : this.repetitions.keySet()) {
if (other.repetitions.containsKey(numOfReps)) {
this.repetitions.put(numOfReps, this.repetitions.get(numOfReps) - other.repetitions.get(numOfReps));
}
}
for (Integer numOfReps : other.repetitions.keySet()) {
if (!this.repetitions.containsKey(numOfReps)) {
logger.warn("Trying to merge subtractively a number of repetitions that were not included for " + numOfReps);
}
}
for (TaskChar key : this.interplayStatsTable.keySet()) {
if (other.interplayStatsTable.containsKey(key)) {
this.interplayStatsTable.get(key).mergeSubtractively(other.interplayStatsTable.get(key));
}
}
for (TaskChar key : other.interplayStatsTable.keySet()) {
if (!this.interplayStatsTable.containsKey(key)) {
logger.warn("Trying to merge subtractively interplay stats that were not included for " + key);
}
}
}
}
// Source file: Janus-master/src/minerful/miner/stats/LocalStatsWrapperForCharsets.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner.stats;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.miner.stats.charsets.FixedTaskSetIncrementalCountersCollection;
public abstract class LocalStatsWrapperForCharsets extends LocalStatsWrapper {
protected FixedTaskSetIncrementalCountersCollection neverAppearedCharacterSets;
protected FixedTaskSetIncrementalCountersCollection neverMoreAppearedAfterCharacterSets;
protected FixedTaskSetIncrementalCountersCollection neverMoreAppearedBeforeCharacterSets;
protected FixedTaskSetIncrementalCountersCollection repetitionsBeforeCharactersAppearingAfter;
protected FixedTaskSetIncrementalCountersCollection repetitionsAfterCharactersAppearingBefore;
protected FixedTaskSetIncrementalCountersCollection neverMoreAfterLastOccurrenceCharacterSets;
protected FixedTaskSetIncrementalCountersCollection neverMoreBeforeFirstOccurrenceCharacterSets;
protected Integer maximumTasksSetSize;
public LocalStatsWrapperForCharsets(TaskCharArchive archive, TaskChar baseTask) {
super(archive, baseTask);
}
public FixedTaskSetIncrementalCountersCollection getNeverAppearedCharacterSets() {
return neverAppearedCharacterSets;
}
public FixedTaskSetIncrementalCountersCollection getNeverMoreAppearedAfterCharacterSets() {
return neverMoreAppearedAfterCharacterSets;
}
public FixedTaskSetIncrementalCountersCollection getNeverMoreAppearedBeforeCharacterSets() {
return neverMoreAppearedBeforeCharacterSets;
}
public FixedTaskSetIncrementalCountersCollection getRepetitionsBeforeCharactersAppearingAfter() {
return repetitionsBeforeCharactersAppearingAfter;
}
public FixedTaskSetIncrementalCountersCollection getRepetitionsAfterCharactersAppearingBefore() {
return repetitionsAfterCharactersAppearingBefore;
}
public FixedTaskSetIncrementalCountersCollection getNeverMoreAfterLastOccurrenceCharacterSets() {
return neverMoreAfterLastOccurrenceCharacterSets;
}
public FixedTaskSetIncrementalCountersCollection getNeverMoreBeforeFirstOccurrenceCharacterSets() {
return neverMoreBeforeFirstOccurrenceCharacterSets;
}
@Override
public void mergeAdditively(LocalStatsWrapper other) {
if (!(other instanceof LocalStatsWrapperForCharsets)) {
// If you can read this, I am already far from you
throw new IllegalArgumentException("Unsummable pears with apples");
}
super.mergeAdditively(other);
LocalStatsWrapperForCharsets otro = (LocalStatsWrapperForCharsets) other;
neverAppearedCharacterSets.merge(otro.neverAppearedCharacterSets);
neverMoreAppearedAfterCharacterSets.merge(otro.neverMoreAppearedAfterCharacterSets);
neverMoreAppearedBeforeCharacterSets.merge(otro.neverMoreAppearedBeforeCharacterSets);
neverMoreAfterLastOccurrenceCharacterSets.merge(otro.neverMoreAfterLastOccurrenceCharacterSets);
neverMoreBeforeFirstOccurrenceCharacterSets.merge(otro.neverMoreBeforeFirstOccurrenceCharacterSets);
repetitionsBeforeCharactersAppearingAfter.merge(otro.repetitionsBeforeCharactersAppearingAfter);
repetitionsAfterCharactersAppearingBefore.merge(otro.repetitionsAfterCharactersAppearingBefore);
}
} | 3,219 | 47.787879 | 102 | java |
// Source file: Janus-master/src/minerful/miner/stats/LocalStatsWrapperForCharsetsWAlternation.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner.stats;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import javax.xml.bind.annotation.XmlTransient;
import minerful.concept.Event;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.miner.stats.charsets.FixedTaskSetIncrementalCountersCollection;
/**
 * Set-based statistics wrapper that also tracks alternations, i.e., for every
 * other task, how many pivot repetitions occur in between its appearances.
 * The per-trace state is kept in {@link #missingAtThisStepBeforeNextRepetition}.
 */
public class LocalStatsWrapperForCharsetsWAlternation extends LocalStatsWrapperForCharsetsWAlternationMarker {
}
// Source file: Janus-master/src/minerful/miner/stats/OccurrencesStatsBuilder.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner.stats;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import minerful.concept.Event;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.utils.MessagePrinter;
import org.apache.log4j.Logger;
public class OccurrencesStatsBuilder {
private static final int PROGRESS_BAR_SCALE = 40;
protected static Logger logger;
public static final boolean ONWARDS = true;
public static final boolean BACKWARDS = !ONWARDS;
private TaskCharArchive taskCharArchive;
private GlobalStatsTable statsTable;
// private Character contemporaneityDelimiter = null;
// private void commonConstructorOperations(Character[] alphabet, Character contemporaneityDelimiter) {
private void commonConstructorOperations(TaskCharArchive archive) {
this.taskCharArchive = archive;
// this.contemporaneityDelimiter = contemporaneityDelimiter;
if (logger == null) {
logger = Logger.getLogger(this.getClass().getCanonicalName());
}
}
// public OccurrencesStatsBuilder(Character[] alphabet, Character contemporaneityDelimiter, Integer maximumBranchingFactor) {
// this.commonConstructorOperations(alphabet, contemporaneityDelimiter);
public OccurrencesStatsBuilder(TaskCharArchive archive, Integer maximumBranchingFactor) {
this.commonConstructorOperations(archive);
this.statsTable = new GlobalStatsTable(archive, maximumBranchingFactor);
}
public GlobalStatsTable checkThisOut(LogParser logParser) {
this.statsTable.logSize += logParser.length();
this.checkThisOut(logParser, ONWARDS);
this.checkThisOut(logParser, BACKWARDS, true);
return this.statsTable;
}
public GlobalStatsTable checkThisOut(LogParser logParser, boolean onwards) {
return this.checkThisOut(logParser, onwards, false);
}
public GlobalStatsTable checkThisOut(LogParser logParser, boolean onwards, boolean secondPass) {
// for the sake of robustness
int counter = 0;
int analysedPortion = 0;
Iterator<LogTraceParser> traceParsersIterator = logParser.traceIterator();
LogTraceParser auxTraceParser = null;
SortedSet<TaskChar> occurredEvents = null;
Event auxEvent = null;
TaskChar auxTaskChar = null;
while (traceParsersIterator.hasNext()) {
auxTraceParser = traceParsersIterator.next();
if (!onwards) {
auxTraceParser.reverse();
}
auxTraceParser.init();
occurredEvents = new TreeSet<TaskChar>();
auxEvent = null;
int positionCursor = 0;
// boolean contemporaneity = false;
while (!auxTraceParser.isParsingOver()) {
auxEvent = auxTraceParser.parseSubsequent().getEvent();
// if (chr.equals(this.contemporaneityDelimiter)) {
// contemporaneity = true;
// } else {
// for the sake of robustness
// if (!contemporaneity) {
positionCursor++;
// } else {
// contemporaneity = false;
// }
if (this.statsTable.taskCharArchive.containsTaskCharByEvent(auxEvent)) {
auxTaskChar = this.statsTable.taskCharArchive.getTaskCharByEvent(auxEvent);
// record the occurrence of this chr in the current string
occurredEvents.add(auxTaskChar);
for (TaskChar appChr : occurredEvents) {
// for each already appeared chr, register the new occurrence of the current in its own stats table, at the proper distance.
this.statsTable.statsTable.get(appChr).newAtPosition(
auxEvent,
( onwards
? positionCursor
: 0 - positionCursor
),
onwards
);
}
}
// }
}
if (!secondPass) {
/* Record the information about which the last task is! */
if (auxTaskChar != null)
this.statsTable.statsTable.get(auxTaskChar).occurrencesAsLast += 1;
/* Record which character did not ever appear in the local stats tables! */
this.setNeverAppearedStuffAtThisStep(occurredEvents);
}
/*
* Reset local stats table counters,
* increment the appearances of the character at position 1 in the string as the first,
* record the amount of occurrences AT THIS STEP and increment the total amount OVER ALL OF THE STEPS,
* reset the switchers for the alternations counters
*/
this.finalizeAnalysisStep(onwards, secondPass);
counter++;
if ( counter > logParser.length() / PROGRESS_BAR_SCALE * (analysedPortion+1) ) {
for (int i = analysedPortion +1;
i < ((double)counter / logParser.length() * PROGRESS_BAR_SCALE);
i++) {
System.out.print("|");
}
analysedPortion = (int) Math.floor((double)counter / logParser.length() * PROGRESS_BAR_SCALE);
}
/*
* If the analysis is made backwards, we should toggle the reverse sense of reading again to put the log parser in its initial status
*/
if (!onwards) {
auxTraceParser.reverse();
}
}
if (secondPass) { MessagePrinter.printlnOut(""); }
return this.statsTable;
}
private void finalizeAnalysisStep(boolean onwards, boolean secondPass) {
for (TaskChar key: this.taskCharArchive.getTaskChars()) {
this.statsTable.statsTable.get(key).finalizeAnalysisStep(onwards, secondPass);
}
}
private void setNeverAppearedStuffAtThisStep(Set<TaskChar> appearedTasks) {
List<TaskChar> differenceStuff = new ArrayList<TaskChar>(this.taskCharArchive.size());
for (TaskChar task : this.taskCharArchive.getTaskChars()) {
differenceStuff.add(task);
}
Set<TaskChar> neverAppearedStuff = new HashSet<TaskChar>(differenceStuff);
neverAppearedStuff.removeAll(appearedTasks);
if (neverAppearedStuff.size() > 0) {
for (TaskChar appearedChr : appearedTasks) {
this.statsTable.statsTable.get(appearedChr).setAsNeverAppeared(neverAppearedStuff);
}
}
}
} | 7,035 | 40.388235 | 149 | java |
// Source file: Janus-master/src/minerful/miner/stats/StatsCell.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.miner.stats;
import java.util.NavigableMap;
import java.util.TreeMap;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.apache.log4j.Logger;
import minerful.miner.stats.xmlenc.DistancesMapAdapter;
@XmlType
@XmlAccessorType(XmlAccessType.FIELD)
public class StatsCell implements Cloneable {
@XmlTransient
public static final int NEVER_ONWARDS = Integer.MAX_VALUE;
@XmlTransient
public static final int NEVER_BACKWARDS = Integer.MIN_VALUE;
@XmlTransient
public static final int NEVER_EVER = 0;
@XmlTransient
protected static Logger logger = Logger.getLogger(StatsCell.class);
@XmlJavaTypeAdapter(value=DistancesMapAdapter.class)
public NavigableMap<Integer, Integer> distances;
@XmlElement(name="repetitionsInBetweenOnwards")
public int betweenOnwards;
@XmlElement(name="repetitionsInBetweenBackwards")
public int betweenBackwards;
public StatsCell() {
this.distances = new TreeMap<Integer, Integer>();
this.betweenOnwards = 0;
this.betweenBackwards = 0;
}
void newAtDistance(int distance) {
this.newAtDistance(distance, 1);
}
void newAtDistance(int distance, int quantity) {
Integer distanceCounter = this.distances.get(distance);
distanceCounter = (distanceCounter == null ? quantity : distanceCounter + quantity);
this.distances.put(distance, distanceCounter);
}
void setAsNeverAppeared(int quantity) {
this.newAtDistance(NEVER_EVER, quantity);
}
void setAsNeverAppearedAnyMore(int quantity, boolean onwards) {
this.newAtDistance(
(onwards ? NEVER_ONWARDS : NEVER_BACKWARDS),
quantity
);
}
/**
* It does nothing, at this stage of the implementation!
* @param onwards
* @param secondPass
*/
void finalizeAnalysisStep(boolean onwards, boolean secondPass) {
}
@Override
public String toString() {
StringBuilder sBuf = new StringBuilder();
if (this.distances.keySet() == null || this.distances.keySet().size() == 0)
return "{}\n";
for (Integer key : this.distances.keySet()) {
sBuf.append(", <");
switch(key) {
case NEVER_ONWARDS:
sBuf.append("Never more");
break;
case NEVER_BACKWARDS:
sBuf.append("Never before");
break;
case NEVER_EVER:
sBuf.append("Never");
break;
default:
sBuf.append(String.format("%+d", key));
break;
}
sBuf.append(", "
+ this.distances.get(key)
+ ">");
}
sBuf.append("} time(/s)");
sBuf.append(", alternating: {onwards = ");
sBuf.append(this.betweenOnwards);
sBuf.append(", backwards = ");
sBuf.append(this.betweenBackwards);
sBuf.append("} time(/s)\n");
return "{" + sBuf.substring(2);
}
@Override
public Object clone() {
StatsCell clone = new StatsCell();
clone.distances = new TreeMap<Integer, Integer>(this.distances);
return clone;
}
public double howManyTimesItNeverAppearedBackwards() {
if (this.distances.containsKey(NEVER_BACKWARDS))
return this.distances.get(NEVER_BACKWARDS);
return 0;
}
public double howManyTimesItNeverAppearedOnwards() {
if (this.distances.containsKey(NEVER_ONWARDS))
return this.distances.get(NEVER_ONWARDS);
return 0;
}
public double howManyTimesItNeverAppearedAtAll() {
if (this.distances.containsKey(NEVER_EVER))
return this.distances.get(NEVER_EVER);
return 0;
}
public void mergeAdditively(StatsCell other) {
this.betweenBackwards += other.betweenBackwards;
this.betweenOnwards += other.betweenOnwards;
for (Integer distance : this.distances.keySet()) {
if (other.distances.containsKey(distance)) {
this.distances.put(distance, this.distances.get(distance) + other.distances.get(distance));
}
}
for (Integer distance : other.distances.keySet()) {
if (!this.distances.containsKey(distance)) {
this.distances.put(distance, other.distances.get(distance));
}
}
}
public void mergeSubtractively(StatsCell other) {
this.betweenBackwards -= other.betweenBackwards;
this.betweenOnwards -= other.betweenOnwards;
for (Integer distance : this.distances.keySet()) {
if (other.distances.containsKey(distance)) {
this.distances.put(distance, this.distances.get(distance) - other.distances.get(distance));
}
}
for (Integer distance : other.distances.keySet()) {
if (!this.distances.containsKey(distance)) {
logger.warn("Trying to merge subtractively distance stats that were not included for " + distance);
}
}
}
} | 5,385 | 30.313953 | 103 | java |
Janus | Janus-master/src/minerful/miner/stats/charsets/FixedTaskSetIncrementalCountersCollection.java | package minerful.miner.stats.charsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import minerful.concept.TaskChar;
public class FixedTaskSetIncrementalCountersCollection extends TaskSetCountersCollection {
public FixedTaskSetIncrementalCountersCollection(Set<TaskChar> alphabet) {
this.tasksSetCounterCollection = new TreeSet<TasksSetCounter>();
this.singleTaskIndexer = new HashMap<TaskChar, TreeSet<TasksSetCounter>>(alphabet.size());
this.setupSingleCharIndexer(alphabet);
}
private void setupSingleCharIndexer(Set<TaskChar> alphabet) {
for (TaskChar chr : alphabet) {
this.singleTaskIndexer.put(chr, new TreeSet<TasksSetCounter>());
}
}
@Override
protected void reIndex(Set<TaskChar> stuff, TasksSetCounter indexed) {
for (TaskChar chr : stuff) {
this.singleTaskIndexer.get(chr).add(indexed);
}
}
/**
* This is the second best idea applied in this algorithm, after the one avoiding the transitive closure.
* @param source A Map connecting a numeric value to single characters
* @return An aggregated information concerning subsets of characters, given the set of keys in the <code>source</code> Map
*/
public static FixedTaskSetIncrementalCountersCollection fromNumberedSingletons(Map<TaskChar, Integer> source) {
// The return value
FixedTaskSetIncrementalCountersCollection charSetCountColln = null;
// Key idea 1: let us revert numbers and characters: we aggregate characters sharing the same numeric value!
Map<Integer, SortedSet<TaskChar>> reversedMap = new TreeMap<Integer, SortedSet<TaskChar>>();
// Temporary variable, storing those numeric values that are associated to characters
int auxSum = 0;
// This variable will come into play later. Please wait... By now, just remind that it's meant to record the numeric values acting as keys in reversedMap, in ascending order
Set<Integer> sortedSums = new TreeSet<Integer>();
// The "local" alphabet of characters
SortedSet<TaskChar> alphaList = new TreeSet<TaskChar>();
for (TaskChar key : source.keySet()) {
auxSum = source.get(key);
// We do not care about 0's
if (auxSum > 0) {
if (!reversedMap.containsKey(auxSum)) {
reversedMap.put(auxSum, new TreeSet<TaskChar>());
}
// If it was already there, no problem! It won't be added, actually.
reversedMap.get(auxSum).add(key);
// Read above!
alphaList.add(key);
sortedSums.add(auxSum);
}
}
/*
* Refactoring phase! E.g., say
* a => 2, b => 3, c => 2.
* This means that
* {b} => 3 and {a, b, c} => 2
* Up to this point, we would have
* {a, c} => 2 and
* {b} => 3
* This means, in turn, that you have to iteratively "propagate"
* characters from the top-rated (in terms of count) sets to the
* lower-rated sets. In the example, you have to
* add {b} (rated 3) into
* {a, c} (rated 2)
* so to have {a, b, c} rated 2.
*/
Integer[] sortedSumsArray = sortedSums.toArray(new Integer[0]);
// Here we sort the array. Well, it might be useless, as the TreeSet already uses an ascending order over stored values, but you can never know...
Arrays.sort(sortedSumsArray);
// From the highest numeric value, to the lowest...
for (int i = sortedSumsArray.length -1; i > 0; i--) {
// Get the numeric value currently below this and add all its associated characters
reversedMap.get(sortedSumsArray[i-1])
.addAll(reversedMap.get(sortedSumsArray[i]));
}
/*
* Now we have:
* {a, b, c} = 2
* {b} = 3
* Which is fine. But we want to consider the *delta* values.
* Now we know for sure that the numbers are associated to sets for which a STRICT DESCENDING containment order holds as numbers grow, due to the "propagation" technique that we adopted.
* Therefore, we want something like this now:
* {a, b, c} = 2
* {b} = 1
*/
charSetCountColln = fromCountedCharSets(
reversedMap,
alphaList
);
return charSetCountColln;
}
public static FixedTaskSetIncrementalCountersCollection fromCountedCharSets(
Map<Integer, SortedSet<TaskChar>> counterForCharSets, Set<TaskChar> alphabet) {
FixedTaskSetIncrementalCountersCollection charSetConCol = new FixedTaskSetIncrementalCountersCollection(alphabet);
SortedSet<TaskChar> auxCharSet = null;
int difference = 0;
// Ascending order
for (Integer keyInt : counterForCharSets.keySet()) {
auxCharSet = counterForCharSets.get(keyInt);
charSetConCol.incrementAt(auxCharSet, keyInt - difference);
difference += keyInt - difference;
}
return charSetConCol;
}
public void merge(FixedTaskSetIncrementalCountersCollection other) {
for (TasksSetCounter otherCharSetCounter : other.tasksSetCounterCollection) {
this.incrementAt(otherCharSetCounter.getTaskCharSet(), otherCharSetCounter.getCounter());
}
}
public SortedSet<TasksSetCounter> selectCharSetCountersSharedAmong(
Collection<TaskChar> sharingTasks) {
Iterator<TaskChar> taskIterator = sharingTasks.iterator();
TaskChar currTask = null;
TreeSet<TasksSetCounter>
shared =
new TreeSet<TasksSetCounter>(),
tmpShared =
null;
if (taskIterator.hasNext()) {
currTask = taskIterator.next();
shared = new TreeSet<TasksSetCounter>(singleTaskIndexer.get(currTask));
} else {
return shared;
}
while(taskIterator.hasNext()) {
currTask = taskIterator.next();
tmpShared = singleTaskIndexer.get(currTask);
shared.retainAll(tmpShared);
}
return shared;
}
public SortedSet<TasksSetCounter> selectCharSetCountersSharedAmong(
TaskChar[] sharingTasks) {
TreeSet<TasksSetCounter>
shared =
null,
tmpShared =
null;
if (sharingTasks.length > 0) {
for (TaskChar currTask : sharingTasks) {
if (shared == null) {
shared = new TreeSet<TasksSetCounter>(singleTaskIndexer.get(currTask));
} else {
tmpShared = singleTaskIndexer.get(currTask);
shared.retainAll(tmpShared);
}
}
}
return shared;
}
@Override
public SortedSet<TasksSetCounter> getCharactersSetsOrderedByAscendingCounter() {
SortedSet<TasksSetCounter> nuCharSetCounter =
new TreeSet<TasksSetCounter>(
new TasksSetCounter.TaskSetByAscendingCounterComparator()
);
nuCharSetCounter.addAll(this.tasksSetCounterCollection);
return nuCharSetCounter;
}
} | 6,486 | 33.142105 | 188 | java |
Janus | Janus-master/src/minerful/miner/stats/charsets/TaskSetCountersCollection.java | package minerful.miner.stats.charsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import minerful.concept.TaskChar;
/**
 * Base class for collections of {@link TasksSetCounter}s: counters are kept
 * in a sorted set and additionally indexed by every single task their task
 * set contains.
 */
public abstract class TaskSetCountersCollection implements Cloneable {
	// All stored counters, sorted by their task-set string.
	protected TreeSet<TasksSetCounter> tasksSetCounterCollection;
	// For each task, the counters whose task set contains that task.
	protected Map<TaskChar, TreeSet<TasksSetCounter>> singleTaskIndexer;
	public TaskSetCountersCollection() {
		super();
	}
	/** Increments by 1 the counter associated to the given task set. */
	public TasksSetCounter incrementAt(Set<TaskChar> stuff) {
		return this.incrementAt(stuff, 1);
	}
	/** Registers {@code indexed} in the index entry of every task in {@code stuff}. */
	protected abstract void reIndex(Set<TaskChar> stuff, TasksSetCounter indexed);
	/**
	 * Stores the given counter and indexes it, unless an equal one is already present.
	 * @return true if the counter was actually added (and thus indexed)
	 */
	protected boolean addAndReIndex(Set<TaskChar> charsInNuTaskCharsSetCounter, TasksSetCounter nuTaskCharsSetCounter) {
		if (this.tasksSetCounterCollection.add(nuTaskCharsSetCounter)) {
			reIndex(charsInNuTaskCharsSetCounter, nuTaskCharsSetCounter);
			return true;
		}
		return false;
	}
	/** @return the backing sorted set of counters (not a copy). */
	public TreeSet<TasksSetCounter> getTaskCharsSetCounterCollection() {
		return this.tasksSetCounterCollection;
	}
	/** Increments by {@code sum} the counter of the singleton set containing the given task. */
	public TasksSetCounter incrementAt(TaskChar charInNuTaskCharsSetCounter, int sum) {
		Set<TaskChar> charsInNuTaskCharsSetCounter = new TreeSet<TaskChar>();
		charsInNuTaskCharsSetCounter.add(charInNuTaskCharsSetCounter);
		return this.incrementAt(charsInNuTaskCharsSetCounter, sum);
	}
	/**
	 * Increments by {@code sum} the counter associated to the given task set,
	 * creating and storing it if absent.
	 * NOTE(review): when an equal counter already exists, the *stored* one
	 * (retrieved via floor()) is incremented, but the freshly built probe —
	 * whose own counter stays 0 — is what gets returned; callers must not
	 * rely on the returned object reflecting the stored count in that case.
	 */
	public TasksSetCounter incrementAt(Set<TaskChar> charsInNuTaskCharsSetCounter, int sum) {
		// This is the haystack
		TasksSetCounter needle = new TasksSetCounter(charsInNuTaskCharsSetCounter);
		int nuCounter = 0;
		if (this.tasksSetCounterCollection.contains(needle)) {
			nuCounter = this.tasksSetCounterCollection.floor(needle).incrementCounter(sum);
		} else {
			this.addAndReIndex(charsInNuTaskCharsSetCounter, needle);
			nuCounter = needle.incrementCounter(sum);
		}
		return needle;
	}
	/** Returns a copy of the counters, sorted by ascending counter value. */
	public SortedSet<TasksSetCounter> getTaskCharsSetsOrderedByAscendingCounter() {
		SortedSet<TasksSetCounter> nuCharSetCounter =
				new TreeSet<TasksSetCounter>(
						new TasksSetCounter.TaskSetByAscendingCounterComparator()
				);
		nuCharSetCounter.addAll(this.tasksSetCounterCollection);
		return nuCharSetCounter;
	}
	@Override
	public String toString() {
		StringBuilder sBuil = new StringBuilder();
		sBuil.append('\n');
		sBuil.append(this.getClass().getName().substring(this.getClass().getName().lastIndexOf('.')+1));
		sBuil.append(": {");
		sBuil.append("\n\tList:");
		for (TasksSetCounter chSCnt : this.tasksSetCounterCollection) {
			sBuil.append(chSCnt.toString().replace("\n", "\n\t\t"));
		}
		sBuil.append("\n\tIndexed:");
		for (TaskChar chr : this.singleTaskIndexer.keySet()) {
			sBuil.append("\n\t\tchr=");
			sBuil.append(chr);
			sBuil.append(" => {");
			sBuil.append(this.singleTaskIndexer.get(chr).toString().replace("\n", "\n\t\t\t"));
			sBuil.append("\n\t\t}");
		}
		sBuil.append("\n}");
		return sBuil.toString();
	}
	// NOTE(review): shallow clone — the copy shares both internal collections
	// with the original.
	@Override
	protected Object clone() throws CloneNotSupportedException {
		// TODO Auto-generated method stub
		return super.clone();
	}
	/**
	 * NOTE(review): tailSet().first() yields the first counter whose task-set
	 * string is >= the probe's, which is NOT necessarily an exact match for
	 * {@code indexer}; also, tailSet() never returns null, so only the
	 * emptiness check is effective.
	 */
	public TasksSetCounter get(Collection<TaskChar> indexer) {
		SortedSet<TasksSetCounter> tailSet = this.tasksSetCounterCollection.tailSet(new TasksSetCounter(indexer));
		if (tailSet == null || tailSet.size() == 0)
			return null;
		return tailSet.first();
	}
	/**
	 * Like {@link #get(Collection)}, but on a miss it retries with only those
	 * tasks of {@code indexer} that are actually indexed here.
	 */
	public TasksSetCounter getNearest(Collection<TaskChar> indexer) {
		TasksSetCounter nearest = this.get(indexer);
		if (nearest != null) {
			return nearest;
		} else {
			Collection<TaskChar> indexedCharsWithinIndexer = new ArrayList<TaskChar>();
			for (TaskChar singleIndex : indexer) {
				if (this.singleTaskIndexer.containsKey(singleIndex))
					indexedCharsWithinIndexer.add(singleIndex);
			}
			return this.get(indexedCharsWithinIndexer);
		}
	}
	// NOTE(review): throws NoSuchElementException when no counter is >= the
	// singleton probe, and (as above) the result may not be an exact match.
	public TasksSetCounter get(TaskChar indexer) {
		return this.tasksSetCounterCollection.tailSet(new TasksSetCounter(indexer)).first();
	}
	// Overridden meaningfully in subclasses; this base version is a stub.
	public SortedSet<TasksSetCounter> getCharactersSetsOrderedByAscendingCounter() {
		// TODO Auto-generated method stub
		return null;
	}
}
Janus | Janus-master/src/minerful/miner/stats/charsets/TasksSetCounter.java | package minerful.miner.stats.charsets;
import java.util.Collection;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
import minerful.concept.TaskChar;
public class TasksSetCounter implements Comparable<TasksSetCounter>, Cloneable {
public static class TaskSetByAscendingCounterComparator implements Comparator<TasksSetCounter> {
@Override
public int compare(TasksSetCounter o1, TasksSetCounter o2) {
int result = Integer.valueOf(o1.counter).compareTo(Integer.valueOf(o2.counter));
return (
( result == 0 )
? o1.compareTo(o2)
: result
);
}
}
private final String charactersSetString;
private final TreeSet<TaskChar> taskCharSet;
private int counter;
public TasksSetCounter(TaskChar task) {
this.charactersSetString = String.valueOf(task);
this.taskCharSet = new TreeSet<TaskChar>();
this.taskCharSet.add(task);
this.counter = 0;
}
public TasksSetCounter(Collection<TaskChar> charactersSet) {
String charsImplosion = createCharSetString(charactersSet);
this.charactersSetString = charsImplosion;
this.taskCharSet = new TreeSet<TaskChar>(charactersSet);
this.counter = 0;
}
private String createCharSetString(Collection<TaskChar> charactersSet) {
StringBuilder sBuil = new StringBuilder(charactersSet.size());
for (TaskChar tCh : charactersSet)
sBuil.append(tCh.identifier);
return sBuil.toString();
}
private TasksSetCounter(Collection<TaskChar> taskCharCollection, String charactersSetString, int counter) {
this.counter = counter;
this.taskCharSet = new TreeSet<TaskChar>(taskCharCollection);
this.charactersSetString = createCharSetString(taskCharCollection);
}
public TasksSetCounter(Collection<TaskChar> charactersSet, TaskChar characterOnMore) {
charactersSet.add(characterOnMore);
String charsImplosion = createCharSetString(charactersSet);
this.charactersSetString = charsImplosion;
this.taskCharSet = new TreeSet<TaskChar>(charactersSet);
this.counter = 0;
}
public Set<TaskChar> getTaskCharSet() {
return this.taskCharSet;
}
public Set<TaskChar> getCopyOfCharactersSet() {
return (Set<TaskChar>)(this.taskCharSet.clone());
}
public String getCharactersSetString() {
return charactersSetString;
}
public int getCounter() {
return counter;
}
public int incrementCounter() {
return this.incrementCounter(1);
}
public int incrementCounter(int sum) {
this.counter += sum;
return counter;
}
public int howManyCharactersInSet() {
return this.taskCharSet.size();
}
public boolean isSingleton() {
return howManyCharactersInSet() == 1;
}
@Override
public int compareTo(TasksSetCounter other) {
return this.charactersSetString.compareTo(other.charactersSetString);
}
@Override
public boolean equals(Object other) {
return this.charactersSetString.equals(
((TasksSetCounter)other).getCharactersSetString()
);
}
@Override
protected Object clone() throws CloneNotSupportedException {
return new TasksSetCounter(taskCharSet, this.charactersSetString, this.counter);
}
@Override
public String toString() {
StringBuilder sBuil = new StringBuilder();
sBuil.append('\n');
sBuil.append(this.getClass().getName().substring(this.getClass().getName().lastIndexOf('.')+1));
sBuil.append(": {");
sBuil.append("charactersSetString=");
sBuil.append(this.charactersSetString);
sBuil.append(" => ");
sBuil.append("counter=");
sBuil.append(this.counter);
sBuil.append("}");
return sBuil.toString();
}
} | 3,531 | 27.031746 | 108 | java |
Janus | Janus-master/src/minerful/miner/stats/charsets/VariableCharactersSetFixedCountersCollection.java | package minerful.miner.stats.charsets;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import minerful.concept.TaskChar;
public class VariableCharactersSetFixedCountersCollection extends TaskSetCountersCollection {
private Integer maximumCharactersSetSize;
public VariableCharactersSetFixedCountersCollection() {
this(null);
}
public VariableCharactersSetFixedCountersCollection(
Integer maximumCharactersSetSize) {
this.tasksSetCounterCollection = new TreeSet<TasksSetCounter>();
this.singleTaskIndexer = new TreeMap<TaskChar, TreeSet<TasksSetCounter>>();
this.maximumCharactersSetSize = maximumCharactersSetSize;
}
@Override
protected void reIndex(Set<TaskChar> stuff, TasksSetCounter indexed) {
for (TaskChar chr : stuff) {
if (!this.singleTaskIndexer.containsKey(chr))
this.singleTaskIndexer.put(chr, new TreeSet<TasksSetCounter>());
this.singleTaskIndexer.get(chr).add(indexed);
}
}
public boolean hasIndex(TaskChar indexer) {
return this.getIndexedBy(indexer) != null;
}
public Set<TasksSetCounter> getIndexedBy(TaskChar indexer) {
return this.singleTaskIndexer.get(indexer);
}
public Set<TasksSetCounter> getIndexedByOrInterleave(TaskChar indexer) {
this.interleave(indexer);
return this.singleTaskIndexer.get(indexer);
}
public void storeAndReIndex(TasksSetCounter nuCharactersSetCounter) {
this.tasksSetCounterCollection.add(nuCharactersSetCounter);
this.reIndex(nuCharactersSetCounter.getTaskCharSet(), nuCharactersSetCounter);
}
private boolean interleave(TaskChar indexer) {
return this.interleave(indexer, false);
}
private boolean interleave(TaskChar indexer, boolean inheritCountFromExisting) {
Set<TasksSetCounter> nuCharSetCounters = null;
if (!this.singleTaskIndexer.containsKey(indexer)) {
nuCharSetCounters = new TreeSet<TasksSetCounter>();
// Add the singleton
TasksSetCounter nuCharSetCounter = new TasksSetCounter(indexer);
Set<TaskChar> nuCharSet = nuCharSetCounter.getTaskCharSet();
this.addAndReIndex(nuCharSet, nuCharSetCounter);
// Combine indexer with existing character sets
for(TasksSetCounter existingCharSetCounter : this.tasksSetCounterCollection) {
if (this.maximumCharactersSetSize == null || existingCharSetCounter.howManyCharactersInSet() < this.maximumCharactersSetSize) {
nuCharSet = existingCharSetCounter.getTaskCharSet();
nuCharSet.add(indexer);
nuCharSetCounter = new TasksSetCounter(nuCharSet);
if (inheritCountFromExisting)
nuCharSetCounter.incrementCounter(existingCharSetCounter.getCounter());
nuCharSetCounters.add(nuCharSetCounter);
}
}
for (TasksSetCounter nuCharSetCounterToAdd : nuCharSetCounters)
this.addAndReIndex(nuCharSetCounterToAdd.getTaskCharSet(), nuCharSetCounterToAdd);
return true;
}
return false;
}
private void mergeAndReindex (TasksSetCounter chSetCounter) {
if (this.tasksSetCounterCollection.contains(chSetCounter)) {
TasksSetCounter alreadyExisting = this.tasksSetCounterCollection.tailSet(chSetCounter).first();
alreadyExisting.incrementCounter(chSetCounter.getCounter());
} else {
this.addAndReIndex(chSetCounter.getTaskCharSet(), chSetCounter);
}
}
public void merge (VariableCharactersSetFixedCountersCollection other) {
other = this.prepareForMerging(other);
for (TasksSetCounter chSetCounter : other.tasksSetCounterCollection) {
this.mergeAndReindex(chSetCounter);
}
}
private VariableCharactersSetFixedCountersCollection prepareForMerging(
VariableCharactersSetFixedCountersCollection other) {
// Check this' single-character indexers. If some are missing, wrt other, it means that some characters did not appear in the previous traces.
if (!this.singleTaskIndexer.keySet().containsAll(other.singleTaskIndexer.keySet())) {
// If some are missing, for each of them you must copy and enlarge the existing sets adding the missing characters, including the counter.
for (TaskChar otherIndexer : other.singleTaskIndexer.keySet()) {
this.interleave(otherIndexer, true);
}
}
// The other way round, for other and this
if (!other.singleTaskIndexer.keySet().containsAll(this.singleTaskIndexer.keySet())) {
// If some are missing, for each of them you must copy and enlarge the existing sets adding the missing characters, including the counter.
for (TaskChar thisIndexer : this.singleTaskIndexer.keySet()) {
other.interleave(thisIndexer, true);
}
}
return other;
}
} | 4,602 | 37.680672 | 144 | java |
Janus | Janus-master/src/minerful/miner/stats/xmlenc/DistancesMapAdapter.java | package minerful.miner.stats.xmlenc;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlAdapter;
/**
 * JAXB adapter (un)marshalling a distance-to-count map as a list of
 * {@code <distance at="..."><counted>...</counted></distance>} elements.
 */
public class DistancesMapAdapter extends XmlAdapter<DistancesMapAdapter.KeyValueList, Map<Integer, Integer>>{
	/** XML-friendly carrier: the map flattened into a list of key/value items. */
	@XmlType(name="distances")
	@XmlAccessorType(XmlAccessType.FIELD)
	public static class KeyValueList {
		/** One map entry: the distance (attribute) and its counter (element). */
		@XmlType(name="distance")
		@XmlAccessorType(XmlAccessType.FIELD)
		public static class Item {
			@XmlAttribute(name="at")
			public Integer key;
			@XmlElement(name="counted")
			public Integer value;
			public Item(Integer key, Integer value) {
				this.key = key;
				this.value = value;
			}
			// No-arg constructor required by JAXB.
			public Item() {
			}
		}
		@XmlElements({
			@XmlElement(name="distance")
		})
		public final List<Item> list;
		// No-arg constructor required by JAXB.
		public KeyValueList() {
			this.list = new ArrayList<DistancesMapAdapter.KeyValueList.Item>();
		}
		public KeyValueList(List<Item> list) {
			this.list = list;
		}
	}
	/** Flattens the map into the list-shaped carrier. */
	@Override
	public KeyValueList marshal(
			Map<Integer, Integer> v) throws Exception {
		ArrayList<DistancesMapAdapter.KeyValueList.Item> items =
				new ArrayList<DistancesMapAdapter.KeyValueList.Item>(v.size());
		for (Map.Entry<Integer, Integer> entry : v.entrySet()) {
			items.add(new DistancesMapAdapter.KeyValueList.Item(entry.getKey(), entry.getValue()));
		}
		return new DistancesMapAdapter.KeyValueList(items);
	}
	/** Rebuilds the map from the list-shaped carrier. */
	@Override
	public Map<Integer, Integer> unmarshal(
			KeyValueList v)
			throws Exception {
		Map<Integer, Integer> distancesMap = new HashMap<Integer, Integer>(v.list.size());
		for (DistancesMapAdapter.KeyValueList.Item item : v.list) {
			distancesMap.put(item.key, item.value);
		}
		return distancesMap;
	}
}
| 2,058 | 27.597222 | 124 | java |
Janus | Janus-master/src/minerful/miner/stats/xmlenc/FirstOccurrencesMapAdapter.java | package minerful.miner.stats.xmlenc;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlAdapter;
/**
 * JAXB adapter (un)marshalling a first-occurrences map (occurrence count to
 * number of traces) as a wrapped list of counter elements.
 * NOTE(review): the {@code @XmlType} names ("repetitionsCollection",
 * "repetitionsCounter") and the "times" attribute look copy-pasted from
 * {@code RepetitionsMapAdapter}; they affect the generated schema naming, so
 * confirm against existing XML artifacts before renaming.
 */
public class FirstOccurrencesMapAdapter extends XmlAdapter<FirstOccurrencesMapAdapter.KeyValueList, Map<Integer, Integer>>{
	/** XML-friendly carrier: the map flattened into a wrapped list of items. */
	@XmlType(name="repetitionsCollection")
	@XmlAccessorType(XmlAccessType.FIELD)
	public static class KeyValueList {
		/** One map entry: the occurrence count (attribute) and its counter (element). */
		@XmlType(name="repetitionsCounter")
		@XmlAccessorType(XmlAccessType.FIELD)
		public static class Item {
			@XmlAttribute(name="times")
			public Integer key;
			@XmlElement(name="counted")
			public Integer value;
			public Item(Integer key, Integer value) {
				this.key = key;
				this.value = value;
			}
			// No-arg constructor required by JAXB.
			public Item() {
			}
		}
		@XmlElementWrapper(name="firstOccurrencesCounters")
		@XmlElement(name="firstOccurrencesCounter")
		public final List<Item> list;
		// No-arg constructor required by JAXB.
		public KeyValueList() {
			this.list = new ArrayList<FirstOccurrencesMapAdapter.KeyValueList.Item>();
		}
		public KeyValueList(List<Item> list) {
			this.list = list;
		}
	}
	/** Flattens the map into the list-shaped carrier. */
	@Override
	public KeyValueList marshal(
			Map<Integer, Integer> v) throws Exception {
		ArrayList<FirstOccurrencesMapAdapter.KeyValueList.Item> items =
				new ArrayList<FirstOccurrencesMapAdapter.KeyValueList.Item>(v.size());
		for (Map.Entry<Integer, Integer> entry : v.entrySet()) {
			items.add(new FirstOccurrencesMapAdapter.KeyValueList.Item(entry.getKey(), entry.getValue()));
		}
		return new FirstOccurrencesMapAdapter.KeyValueList(items);
	}
	/** Rebuilds the map from the list-shaped carrier. */
	@Override
	public Map<Integer, Integer> unmarshal(
			KeyValueList v)
			throws Exception {
		Map<Integer, Integer> occurrencesMap = new HashMap<Integer, Integer>(v.list.size());
		for (FirstOccurrencesMapAdapter.KeyValueList.Item item : v.list) {
			occurrencesMap.put(item.key, item.value);
		}
		return occurrencesMap;
	}
}
| 2,191 | 29.873239 | 138 | java |
Janus | Janus-master/src/minerful/miner/stats/xmlenc/GlobalStatsMapAdapter.java | package minerful.miner.stats.xmlenc;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import minerful.concept.TaskChar;
import minerful.miner.stats.LocalStatsWrapper;
/**
 * JAXB adapter (un)marshalling the per-task global statistics map
 * ({@link TaskChar} to {@link LocalStatsWrapper}) as a list of items.
 * NOTE(review): each item element is named "stats" (plural) while its
 * {@code @XmlType} is "stat" — possibly a copy-paste slip; confirm against
 * the expected XML before changing the element name.
 */
public class GlobalStatsMapAdapter extends XmlAdapter<GlobalStatsMapAdapter.KeyValueList, Map<TaskChar, LocalStatsWrapper>>{
	/** XML-friendly carrier: the map flattened into a list of key/value items. */
	@XmlType(name="stats")
	@XmlAccessorType(XmlAccessType.FIELD)
	public static class KeyValueList {
		/** One map entry: the task and its statistics wrapper. */
		@XmlType(name="stat")
		@XmlAccessorType(XmlAccessType.FIELD)
		public static class Item {
			@XmlElement(name="task")
			public TaskChar key;
			@XmlElement(name="details")
			public LocalStatsWrapper value;
			public Item(TaskChar key, LocalStatsWrapper value) {
				this.key = key;
				this.value = value;
			}
			// No-arg constructor required by JAXB.
			public Item() {
			}
		}
		@XmlElements({
			@XmlElement(name="stats")
		})
		public final List<Item> list;
		// No-arg constructor required by JAXB.
		public KeyValueList() {
			this.list = new ArrayList<GlobalStatsMapAdapter.KeyValueList.Item>();
		}
		public KeyValueList(List<Item> list) {
			this.list = list;
		}
	}
	/** Flattens the map into the list-shaped carrier. */
	@Override
	public GlobalStatsMapAdapter.KeyValueList marshal(
			Map<TaskChar, LocalStatsWrapper> v) throws Exception {
		ArrayList<GlobalStatsMapAdapter.KeyValueList.Item> items =
				new ArrayList<GlobalStatsMapAdapter.KeyValueList.Item>(v.size());
		for (Map.Entry<TaskChar, LocalStatsWrapper> entry : v.entrySet()) {
			items.add(new GlobalStatsMapAdapter.KeyValueList.Item(entry.getKey(), entry.getValue()));
		}
		return new KeyValueList(items);
	}
	/** Rebuilds the map from the list-shaped carrier. */
	@Override
	public Map<TaskChar, LocalStatsWrapper> unmarshal(
			GlobalStatsMapAdapter.KeyValueList v)
			throws Exception {
		Map<TaskChar, LocalStatsWrapper> globalStatsMap = new HashMap<TaskChar, LocalStatsWrapper>(v.list.size());
		for (GlobalStatsMapAdapter.KeyValueList.Item keyValue : v.list) {
			globalStatsMap.put(keyValue.key, keyValue.value);
		}
		return globalStatsMap;
	}
}
| 2,211 | 28.891892 | 128 | java |
Janus | Janus-master/src/minerful/miner/stats/xmlenc/LocalStatsMapAdapter.java | package minerful.miner.stats.xmlenc;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import minerful.concept.TaskChar;
import minerful.miner.stats.StatsCell;
/**
 * JAXB adapter (un)marshalling the per-task local interplay statistics map
 * ({@link TaskChar} to {@link StatsCell}) as a list of
 * {@code interplayStatsWith} items.
 */
public class LocalStatsMapAdapter extends XmlAdapter<LocalStatsMapAdapter.KeyValueList, Map<TaskChar, StatsCell>>{
	/** XML-friendly carrier: the map flattened into a list of key/value items. */
	@XmlType(name="localStats")
	@XmlAccessorType(XmlAccessType.FIELD)
	public static class KeyValueList {
		/** One map entry: the counterpart task and its statistics cell. */
		@XmlType(name="localStat")
		@XmlAccessorType(XmlAccessType.FIELD)
		public static class Item {
			@XmlElement
			public TaskChar key;
			@XmlElement(name="details")
			public StatsCell value;
			public Item(TaskChar key, StatsCell value) {
				this.key = key;
				this.value = value;
			}
			// No-arg constructor required by JAXB.
			public Item() {
			}
		}
		@XmlElements({
			@XmlElement(name="interplayStatsWith"),
		})
		public final List<Item> list;
		// No-arg constructor required by JAXB.
		public KeyValueList() {
			this.list = new ArrayList<LocalStatsMapAdapter.KeyValueList.Item>();
		}
		public KeyValueList(List<Item> list) {
			this.list = list;
		}
	}
	/** Flattens the map into the list-shaped carrier. */
	@Override
	public LocalStatsMapAdapter.KeyValueList marshal(
			Map<TaskChar, StatsCell> v) throws Exception {
		ArrayList<LocalStatsMapAdapter.KeyValueList.Item> items =
				new ArrayList<LocalStatsMapAdapter.KeyValueList.Item>(v.size());
		for (Map.Entry<TaskChar, StatsCell> entry : v.entrySet()) {
			items.add(new LocalStatsMapAdapter.KeyValueList.Item(entry.getKey(), entry.getValue()));
		}
		return new KeyValueList(items);
	}
	/** Rebuilds the map from the list-shaped carrier. */
	@Override
	public Map<TaskChar, StatsCell> unmarshal(
			LocalStatsMapAdapter.KeyValueList v)
			throws Exception {
		Map<TaskChar, StatsCell> localStatsMap = new HashMap<TaskChar, StatsCell>(v.list.size());
		for (LocalStatsMapAdapter.KeyValueList.Item keyValue : v.list) {
			localStatsMap.put(keyValue.key, keyValue.value);
		}
		return localStatsMap;
	}
}
| 2,149 | 28.054054 | 126 | java |
Janus | Janus-master/src/minerful/miner/stats/xmlenc/RepetitionsMapAdapter.java | package minerful.miner.stats.xmlenc;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.XmlAdapter;
/**
 * JAXB adapter (un)marshalling a repetitions map (repetition count to number
 * of traces) as a wrapped list of {@code repetitionsCounter} elements.
 */
public class RepetitionsMapAdapter extends XmlAdapter<RepetitionsMapAdapter.KeyValueList, Map<Integer, Integer>>{
	/** XML-friendly carrier: the map flattened into a wrapped list of items. */
	@XmlType(name="repetitionsCollection")
	@XmlAccessorType(XmlAccessType.FIELD)
	public static class KeyValueList {
		/** One map entry: the repetition count (attribute) and its counter (element). */
		@XmlType(name="repetitionsCounter")
		@XmlAccessorType(XmlAccessType.FIELD)
		public static class Item {
			@XmlAttribute(name="times")
			public Integer key;
			@XmlElement(name="counted")
			public Integer value;
			public Item(Integer key, Integer value) {
				this.key = key;
				this.value = value;
			}
			// No-arg constructor required by JAXB.
			public Item() {
			}
		}
		@XmlElementWrapper(name="repetitionsCounters")
		@XmlElement(name="repetitionsCounter")
		public final List<Item> list;
		// No-arg constructor required by JAXB.
		public KeyValueList() {
			this.list = new ArrayList<RepetitionsMapAdapter.KeyValueList.Item>();
		}
		public KeyValueList(List<Item> list) {
			this.list = list;
		}
	}
	/** Flattens the map into the list-shaped carrier. */
	@Override
	public KeyValueList marshal(
			Map<Integer, Integer> v) throws Exception {
		ArrayList<RepetitionsMapAdapter.KeyValueList.Item> items =
				new ArrayList<RepetitionsMapAdapter.KeyValueList.Item>(v.size());
		for (Map.Entry<Integer, Integer> entry : v.entrySet()) {
			items.add(new RepetitionsMapAdapter.KeyValueList.Item(entry.getKey(), entry.getValue()));
		}
		return new RepetitionsMapAdapter.KeyValueList(items);
	}
	/** Rebuilds the map from the list-shaped carrier. */
	@Override
	public Map<Integer, Integer> unmarshal(
			KeyValueList v)
			throws Exception {
		Map<Integer, Integer> repetitionsMap = new HashMap<Integer, Integer>(v.list.size());
		for (RepetitionsMapAdapter.KeyValueList.Item item : v.list) {
			repetitionsMap.put(item.key, item.value);
		}
		return repetitionsMap;
	}
}
| 2,141 | 29.169014 | 128 | java |
Janus | Janus-master/src/minerful/params/InputLogCmdParameters.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.params;
import java.io.File;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class InputLogCmdParameters extends ParamsManager {
public static final EventClassification DEFAULT_EVENT_CLASSIFICATION = EventClassification.name;
public static final InputEncoding DEFAULT_INPUT_ENCODING = InputEncoding.xes;
public static final String INPUT_LOGFILE_PATH_PARAM_NAME = "iLF";
public static final String INPUT_LOG_ENCODING_PARAM_NAME = "iLE";
public static final String EVENT_CLASSIFICATION_PARAM_NAME = "iLClassif";
public static final String INPUT_LOGFILE_PATH_LONG_PARAM_NAME = "in-log-file";
public static final String INPUT_ENC_PARAM_LONG_NAME = "in-log-encoding";
public static final String EVENT_CLASSIFICATION_LONG_PARAM_NAME = "in-log-evt-classifier";
public static final String START_FROM_TRACE_PARAM_NAME = "iLStartAt";
public static final Integer FIRST_TRACE_NUM = 0;
public static final String SUB_LOG_SIZE_PARAM_NAME = "iLSubLen";
public static final Integer WHOLE_LOG_LENGTH = 0;
public enum InputEncoding {
/**
* For XES logs (also compressed)
*/
xes,
/**
* For MXML logs (also compressed)
*/
mxml,
/**
* For string-encoded traces, where each character is assumed to be a task symbol
*/
strings;
}
public enum EventClassification {
name, logspec
}
/** Encoding language for the input event log (see enum {@link minerful.params.InputLogCmdParameters.InputEncoding InputEncoding}). Default is: {@link minerful.params.InputLogCmdParameters.InputEncoding#xes InputEncoding.xes}.*/
public InputEncoding inputLanguage;
/** Classification policy to relate events to event classes, that is the task names (see enum {@link minerful.params.InputLogCmdParameters.EventClassification EventClassification}). Default is: {@link minerful.params.InputLogCmdParameters.EventClassification#name EventClassification.name}.*/
public EventClassification eventClassification;
/** Input event log file. It must not be <code>null</code>. */
public File inputLogFile;
/** Number of the trace to start the analysis from */
public Integer startFromTrace;
/** Length of the sub-sequence of traces to analyse */
public Integer subLogLength;
public InputLogCmdParameters() {
super();
inputLanguage = DEFAULT_INPUT_ENCODING;
eventClassification = DEFAULT_EVENT_CLASSIFICATION;
this.startFromTrace = FIRST_TRACE_NUM;
this.subLogLength = WHOLE_LOG_LENGTH;
inputLogFile = null;
}
public InputLogCmdParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public InputLogCmdParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.inputLogFile = openInputFile(line, INPUT_LOGFILE_PATH_PARAM_NAME);
this.inputLanguage = InputEncoding.valueOf(
line.getOptionValue(
INPUT_LOG_ENCODING_PARAM_NAME,
this.inputLanguage.toString()
)
);
this.eventClassification = EventClassification.valueOf(
line.getOptionValue(
EVENT_CLASSIFICATION_PARAM_NAME,
this.eventClassification.toString()
)
);
this.startFromTrace = Integer.valueOf(
line.getOptionValue(
START_FROM_TRACE_PARAM_NAME,
this.startFromTrace.toString()
)
);
this.subLogLength = Integer.valueOf(
line.getOptionValue(
SUB_LOG_SIZE_PARAM_NAME,
this.subLogLength.toString()
)
);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(INPUT_LOG_ENCODING_PARAM_NAME)
.hasArg().argName("language")
.longOpt(INPUT_ENC_PARAM_LONG_NAME)
.desc("input encoding language " + printValues(InputEncoding.values())
+ printDefault(fromEnumValueToString(DEFAULT_INPUT_ENCODING)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(EVENT_CLASSIFICATION_PARAM_NAME)
.hasArg().argName("class")
.longOpt(EVENT_CLASSIFICATION_LONG_PARAM_NAME)
.desc("event classification (resp., by activity name, or according to the log-specified pattern) " + printValues(EventClassification.values())
+ printDefault(fromEnumValueToString(DEFAULT_EVENT_CLASSIFICATION)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_LOGFILE_PATH_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt(INPUT_LOGFILE_PATH_LONG_PARAM_NAME)
.desc("path to read the log file from")
.type(String.class)
.build()
);
options.addOption(
Option.builder(START_FROM_TRACE_PARAM_NAME)
.hasArg().argName("number")
.longOpt("start-from-trace")
.desc("ordinal number of the trace from which the analysed sub-log should start"
+ printDefault(FIRST_TRACE_NUM))
.type(Long.class)
.build()
);
options.addOption(
Option.builder(SUB_LOG_SIZE_PARAM_NAME)
.hasArg().argName("length")
.longOpt("sub-log-size")
.desc("number of traces to be analysed in the sub-log. To have the entire log analysed, leave the default value"
+ printDefault(WHOLE_LOG_LENGTH))
.type(Long.class)
.build()
);
return options;
}
} | 6,296 | 35.824561 | 293 | java |
Janus | Janus-master/src/minerful/params/ParamsManager.java | package minerful.params;
import java.io.File;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import minerful.utils.MessagePrinter;
public abstract class ParamsManager {
public static final String EXPERIMENTAL_DEVELOPMENT_STAGE_MESSAGE =
"*** WARNING: experimental development stage of implementation!";
private static final int DEFAULT_PROMPT_WIDTH = 160;
protected HelpFormatter helpFormatter = new HelpFormatter();
public static final String ARRAY_TOKENISER_SEPARATOR = MessagePrinter.ARRAY_TOKENISER_SEPARATOR;
public ParamsManager() {
helpFormatter.setWidth(DEFAULT_PROMPT_WIDTH);
}
public void printHelp() {
this.printHelp(this.listParseableOptions());
}
public void printHelp(Options options) {
helpFormatter.printHelp("cmd_name", options, true);
}
public void printHelpForWrongUsage(String errorMessage, Options options) {
System.err.println("Wrong usage: " + errorMessage);
this.printHelp(options);
}
public void printHelpForWrongUsage(String errorMessage) {
this.printHelpForWrongUsage(errorMessage, this.listParseableOptions());
}
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt : myOptions.getOptions()) {
options.addOption((Option) myOpt);
}
return options;
}
protected void parseAndSetup(Options otherOptions, String[] args) {
// create the command line parser
CommandLineParser parser = new PosixParser();
Options options = addParseableOptions(otherOptions);
try {
CommandLine line = parser.parse(options, args, false);
setup(line);
} catch (ParseException exp) {
System.err.println("Unexpected exception:" + exp.getMessage());
}
}
public Options listParseableOptions() {
return parseableOptions();
}
protected File openInputFile(CommandLine line, String paramName) {
File inpuFile = null;
if (!line.hasOption(paramName))
return inpuFile;
String inputFilePath = line.getOptionValue(paramName);
if (inputFilePath != null) {
inpuFile = new File(inputFilePath);
if ( !inpuFile.exists()
|| !inpuFile.canRead()
|| !inpuFile.isFile()) {
throw new IllegalArgumentException("Unreadable file: " + inputFilePath);
}
}
return inpuFile;
}
protected File openOutputFile(CommandLine line, String paramName) {
if (!line.hasOption(paramName))
return null;
File outpuFile = new File(line.getOptionValue(paramName));
if (outpuFile != null) {
if (outpuFile.isDirectory()) {
throw new IllegalArgumentException("Unwritable file: " + outpuFile + " is a directory!");
}
}
return outpuFile;
}
protected File openOutputDir(CommandLine line, String paramName) {
File inpuDir = null;
if (!line.hasOption(paramName))
return inpuDir;
String inputDirPath = line.getOptionValue(paramName);
if (inputDirPath != null) {
inpuDir = new File(inputDirPath);
if ( !inpuDir.exists()
|| !inpuDir.canWrite()
|| !inpuDir.isDirectory()) {
throw new IllegalArgumentException("Unaccessible directory: " + inputDirPath);
}
}
return inpuDir;
}
/**
* Meant to be hidden by extending classes!
*/
private static Options parseableOptions() {
return new Options();
}
protected abstract void setup(CommandLine line);
protected static String fromStringToEnumValue(String token) {
if (token != null)
return token.trim().toUpperCase().replace("-", "_");
return null;
}
protected static String[] tokenise(String paramString) {
return MessagePrinter.tokenise(paramString);
}
public static String printDefault(Object defaultValue) {
return ".\nDefault is: '" + defaultValue.toString() + "'";
}
protected static String attachInstabilityWarningToDescription(String description) {
return EXPERIMENTAL_DEVELOPMENT_STAGE_MESSAGE + "\n" + description;
}
public static String printValues(Object... values) {
return MessagePrinter.printValues(values);
}
public static String fromEnumValueToString(Object token) {
return MessagePrinter.fromEnumValueToString(token);
}
public static String fromEnumValuesToTokenJoinedString(Object... tokens) {
return MessagePrinter.fromEnumValuesToTokenJoinedString(tokens);
}
} | 4,841 | 31.066225 | 97 | java |
Janus | Janus-master/src/minerful/params/SlidingCmdParameters.java | package minerful.params;
import java.io.File;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class SlidingCmdParameters extends ParamsManager {
public static final String SLIDING_STEP_PARAM_NAME = "sliBy";
public static final int DEFAULT_SLIDING_STEP = 1;
public static final String STICK_TAIL_PARAM_NAME = "sliStick";
public static final boolean DEFAULT_STICKY_TAIL_POLICY = false;
public static final String INTERMEDIATE_OUTPUT_PARAM_NAME = "sliOut";
/** Sets how long is the step to slide the window on the event log. The default is {@link SlidingCmdParameters#DEFAULT_SLIDING_STEP DEFAULT_SLIDING_STEP} */
public Integer slidingStep;
/** The file where to store as a CSV file the constraints' support while MINERful slides over the traces. */
public File intermediateOutputCsvFile;
/** Determines whether to stick the tail at the beginning, so that the sliding corresponds to the expansion of the window. The default is {@link SlidingCmdParameters#DEFAULT_STICKY_TAIL_POLICY DEFAULT_STICKY_TAIL_POLICY} */
public Boolean stickTail;
public SlidingCmdParameters() {
super();
slidingStep = DEFAULT_SLIDING_STEP;
intermediateOutputCsvFile = null;
stickTail = DEFAULT_STICKY_TAIL_POLICY;
}
public SlidingCmdParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public SlidingCmdParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.slidingStep =
Integer.valueOf(line.getOptionValue(
SLIDING_STEP_PARAM_NAME,
this.slidingStep.toString()
)
);
if (slidingStep < 0) {
throw new IllegalArgumentException("The sliding window step should be an integer higher than, or equal to, 0");
}
this.intermediateOutputCsvFile = openOutputFile(line, INTERMEDIATE_OUTPUT_PARAM_NAME);
this.stickTail = line.hasOption(STICK_TAIL_PARAM_NAME);
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(SLIDING_STEP_PARAM_NAME)
.hasArg().argName("num")
.longOpt("slide-by")
.desc("sliding window step, in number of traces (must be higher than 0)" + printDefault(DEFAULT_SLIDING_STEP))
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(STICK_TAIL_PARAM_NAME)
.longOpt("stick-tail")
.desc("block the tail and slide only the head (increasing the window length at every step)" + printDefault(DEFAULT_STICKY_TAIL_POLICY))
.build()
);
options.addOption(
Option.builder(INTERMEDIATE_OUTPUT_PARAM_NAME)
.hasArg().argName("file")
.required(true)
.longOpt("sliding-results-out")
.desc("path of the file in which the values of constraints' measures are written")
.build()
);
return options;
}
}
| 3,232 | 34.922222 | 227 | java |
Janus | Janus-master/src/minerful/params/SystemCmdParameters.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.params;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class SystemCmdParameters extends ParamsManager {
private static final DebugLevel DEFAULT_DEBUG_LEVEL = DebugLevel.info;
public enum DebugLevel {
none, info, debug, trace, all;
}
public static final char DEBUG_PARAM_NAME = 'd';
public static final char HELP_PARAM_NAME = 'h';
/** Desired level of debugging (see enum {@link minerful.params.SystemCmdParameters.DebugLevel DebugLevel}) */
public DebugLevel debugLevel;
/** Set this variable to <code>true</code> to print out a help screen */
public Boolean help;
public SystemCmdParameters() {
super();
debugLevel = DEFAULT_DEBUG_LEVEL;
help = false;
}
public SystemCmdParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public SystemCmdParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.help =
line.hasOption(SystemCmdParameters.HELP_PARAM_NAME);
this.debugLevel = DebugLevel.valueOf(
line.getOptionValue(
DEBUG_PARAM_NAME,
this.debugLevel.toString()
)
);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(String.valueOf(SystemCmdParameters.HELP_PARAM_NAME))
.longOpt("help")
.desc("print help")
.build()
);
options.addOption(
Option.builder(String.valueOf(SystemCmdParameters.DEBUG_PARAM_NAME))
.hasArg().argName("debug_level")
.longOpt("debug")
.desc("debug level " + printValues(DebugLevel.values())
+ printDefault(fromEnumValueToString(DEFAULT_DEBUG_LEVEL))
)
.type(Integer.class)
.build()
);
return options;
}
} | 2,551 | 28 | 111 | java |
Janus | Janus-master/src/minerful/params/ViewCmdParameters.java | package minerful.params;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
/**
 * Command-line parameters controlling how discovered constraints are
 * presented: sorting policy, folding, machine-readable print-out, and
 * suppression of the on-screen output.
 */
public class ViewCmdParameters extends ParamsManager {

	/** Criteria by which discovered constraints can be sorted in the print-out. */
	public static enum ConstraintsSorting {
		type, support, interest;
	}

	public static final String MACHINE_READABLE_RESULTS_PARAM_NAME = "vMachine";
	public static final String CONSTRAINTS_SORTING_TYPE_PARAM_NAME = "vSort";
	public static final String CONSTRAINTS_NO_FOLDING_PARAM_NAME = "vNoFold";
	public static final String SUPPRESS_SCREEN_PRINT_OUT_PARAM_NAME = "vShush";

	public static final Boolean DEFAULT_DO_MACHINE_READABLE_RESULTS = false;
	public static final ConstraintsSorting DEFAULT_CONSTRAINTS_SORTING_TYPE = ConstraintsSorting.type;
	public static final Boolean DEFAULT_DO_CONSTRAINTS_NO_FOLDING = false;
	public static final Boolean DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT = false;

	/** Set this field to <code>true</code> to print a machine-readable list of supports, for each constraint template and constrained activities. */
	public Boolean machineReadableResults;
	/** How to sort constraints in the print-out of results (see enum {@link minerful.params.ViewCmdParameters.ConstraintsSorting ConstraintsSorting}). Default is: {@link minerful.params.ViewCmdParameters.ConstraintsSorting#type ConstraintsSorting.type}. */
	public ConstraintsSorting constraintsSorting;
	/** Set this field to <code>true</code> to avoid the discovered constraints to be folded under activation tasks in the print-out. */
	public Boolean noFoldingRequired;
	/** Set this field to <code>true</code> to avoid the discovered constraints to be printed out on screen. */
	public Boolean suppressScreenPrintOut;

	/**
	 * Initialises all parameters to their default values.
	 */
	public ViewCmdParameters() {
		super();
		machineReadableResults = DEFAULT_DO_MACHINE_READABLE_RESULTS;
		constraintsSorting = DEFAULT_CONSTRAINTS_SORTING_TYPE;
		noFoldingRequired = DEFAULT_DO_CONSTRAINTS_NO_FOLDING;
		suppressScreenPrintOut = DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT;
	}

	public ViewCmdParameters(Options options, String[] args) {
		this();
		// parse the command line arguments
		this.parseAndSetup(options, args);
	}

	public ViewCmdParameters(String[] args) {
		this();
		// parse the command line arguments
		this.parseAndSetup(new Options(), args);
	}

	/**
	 * Reads the parsed command line; flag options map to booleans, the
	 * sorting option keeps its default when absent.
	 */
	@Override
	protected void setup(CommandLine line) {
		this.constraintsSorting =
				Enum.valueOf(ConstraintsSorting.class,
					line.getOptionValue(
						CONSTRAINTS_SORTING_TYPE_PARAM_NAME,
						this.constraintsSorting.toString()
					)
				);
		this.machineReadableResults = line.hasOption(MACHINE_READABLE_RESULTS_PARAM_NAME);
		this.noFoldingRequired = line.hasOption(CONSTRAINTS_NO_FOLDING_PARAM_NAME);
		this.suppressScreenPrintOut = line.hasOption(SUPPRESS_SCREEN_PRINT_OUT_PARAM_NAME);
	}

	/**
	 * Lists the command-line options this class can parse.
	 */
	@SuppressWarnings("static-access")
	public static Options parseableOptions() {
		Options options = new Options();
		options.addOption(
				Option.builder(CONSTRAINTS_SORTING_TYPE_PARAM_NAME)
						.hasArg().argName("type")
						.longOpt("sort-constraints")
						.desc("Sorting policy for constraints of the discovered process: " + printValues(ConstraintsSorting.values()) +
								printDefault(DEFAULT_CONSTRAINTS_SORTING_TYPE))
						.type(String.class)
						.build()
		);
		options.addOption(
				Option.builder(CONSTRAINTS_NO_FOLDING_PARAM_NAME)
						.longOpt("no-folding")
						.desc("avoid the discovered constraints to be folded under activation tasks" +
								printDefault(DEFAULT_DO_CONSTRAINTS_NO_FOLDING))
						.build()
		);
		options.addOption(
				Option.builder(MACHINE_READABLE_RESULTS_PARAM_NAME)
						.longOpt("machine-readable")
						.desc("print a machine-readable list of supports, for each constraint template and constrained activities in the print-out" +
								printDefault(DEFAULT_DO_MACHINE_READABLE_RESULTS))
						.build()
		);
		options.addOption(
				Option.builder(SUPPRESS_SCREEN_PRINT_OUT_PARAM_NAME)
						.longOpt("no-screen-print-out")
						.desc("suppresses the print-out of discovered constraints on screen" +
								printDefault(DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT))
						.build()
		);
		return options;
	}
}
| 4,334 | 40.285714 | 225 | java |
Janus | Janus-master/src/minerful/postprocessing/params/PostProcessingCmdParameters.java | package minerful.postprocessing.params;
import java.io.File;
import java.util.ArrayList;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import minerful.index.comparator.modular.ConstraintSortingPolicy;
import minerful.params.ParamsManager;
import minerful.postprocessing.pruning.SubsumptionHierarchyMarkingPolicy;
public class PostProcessingCmdParameters extends ParamsManager {
/**
* Specifies the type of post-processing analysis, through which getting rid of redundancies or conflicts in the process model.
* @author Claudio Di Ciccio
*/
public static enum PostProcessingAnalysisType {
/** No post-processing analysis. */
NONE,
/** Hierarchical subsumption pruning of constraints. */
HIERARCHY, // default
/** Hierarchical subsumption pruning of constraints and conflicts check. */
HIERARCHYCONFLICT,
/** Hierarchical subsumption pruning of constraints, conflicts check, and single-pass automata-based redundancy elimination. */
HIERARCHYCONFLICTREDUNDANCY,
/** Hierarchical subsumption pruning of constraints, conflicts check, and double-pass automata-based redundancy elimination. */
HIERARCHYCONFLICTREDUNDANCYDOUBLE;
public String getDescription() {
switch(this) {
case HIERARCHY:
return "Template-hierarchy based simplification";
case HIERARCHYCONFLICT:
return "Template-hierarchy based simplification plus conflict check";
case HIERARCHYCONFLICTREDUNDANCY:
return "Template-hierarchy based simplification plus conflict and redundancy check";
case HIERARCHYCONFLICTREDUNDANCYDOUBLE:
return "Template-hierarchy based simplification plus conflict and double-pass redundancy check";
case NONE:
default:
return "No simplification";
}
}
public boolean isPostProcessingRequested() {
switch(this) {
case NONE:
return false;
default:
return true;
}
}
public boolean isHierarchySubsumptionResolutionRequested() {
switch(this) {
case HIERARCHY:
case HIERARCHYCONFLICT:
case HIERARCHYCONFLICTREDUNDANCY:
case HIERARCHYCONFLICTREDUNDANCYDOUBLE:
return true;
default:
return false;
}
}
public boolean isConflictResolutionRequested() {
switch(this) {
case HIERARCHYCONFLICT:
case HIERARCHYCONFLICTREDUNDANCY:
case HIERARCHYCONFLICTREDUNDANCYDOUBLE:
return true;
default:
return false;
}
}
public boolean isRedundancyResolutionRequested() {
switch(this) {
case HIERARCHYCONFLICTREDUNDANCY:
case HIERARCHYCONFLICTREDUNDANCYDOUBLE:
return true;
default:
return false;
}
}
public boolean isRedundancyResolutionDoubleCheckRequested() {
switch(this) {
case HIERARCHYCONFLICTREDUNDANCYDOUBLE:
return true;
default:
return false;
}
}
}
public static final String ANALYSIS_TYPE_PARAM_NAME = "prune";
public static final String RANKING_POLICY_PARAM_NAME = "pruneRnk";
// public static final String HIERARCHY_SUBSUMPTION_PRUNING_POLICY_PARAM_NAME = "ppHSPP"; // TODO One day
public static final String KEEP_CONSTRAINTS_PARAM_NAME = "keep";
public static final String KEEP_MODEL_PARAM_NAME = "keepModel";
public static final char SUPPORT_THRESHOLD_PARAM_NAME = 's';
public static final char INTEREST_THRESHOLD_PARAM_NAME = 'i';
public static final char CONFIDENCE_THRESHOLD_PARAM_NAME = 'c';
public static final Double DEFAULT_SUPPORT_THRESHOLD = 0.95;
public static final Double DEFAULT_INTEREST_FACTOR_THRESHOLD = 0.125;
public static final Double DEFAULT_CONFIDENCE_THRESHOLD = 0.25;
public static final PostProcessingAnalysisType DEFAULT_POST_PROCESSING_ANALYSIS_TYPE = PostProcessingAnalysisType.HIERARCHY;
public static final HierarchySubsumptionPruningPolicy DEFAULT_HIERARCHY_POLICY = HierarchySubsumptionPruningPolicy.SUPPORTHIERARCHY;
public static final boolean DEFAULT_REDUNDANT_INCONSISTENT_CONSTRAINTS_KEEPING_POLICY = false;
/** Policies according to which constraints are ranked in terms of significance. The position in the array reflects the order with which the policies are used. When a criterion does not establish which constraint in a pair should be put ahead in the ranking, the following in the array is utilised. Default value is {@link #DEFAULT_PRIORITY_POLICIES DEFAULT_PRIORITY_POLICIES}. */
public ConstraintSortingPolicy[] sortingPolicies; // mandatory assignment
/** Type of post-processing analysis required. Default value is {@link #DEFAULT_POST_PROCESSING_ANALYSIS_TYPE DEFAULT_ANALYSIS_TYPE}. */
public PostProcessingAnalysisType postProcessingAnalysisType;
/** Ignore this: it is still unused -- Policies according to which constraints are ranked in terms of significance. Default value is {@link #DEFAULT_HIERARCHY_POLICY DEFAULT_HIERARCHY_POLICY}. */
public HierarchySubsumptionPruningPolicy hierarchyPolicy;
/** Minimum support threshold required to consider a discovered constraint significant. Default value is {@link #DEFAULT_SUPPORT_THRESHOLD DEFAULT_SUPPORT_THRESHOLD}. */
public Double supportThreshold;
/** Minimum confidence level threshold required to consider a discovered constraint significant. Default value is {@link #DEFAULT_CONFIDENCE_THRESHOLD DEFAULT_CONFIDENCE_THRESHOLD}. */
public Double confidenceThreshold;
/** Minimum interest factor threshold required to consider a discovered constraint significant. Default value is {@link #DEFAULT_INTEREST_FACTOR_THRESHOLD DEFAULT_INTEREST_FACTOR_THRESHOLD}. */
public Double interestFactorThreshold;
/** Specifies whether the redundant or inconsistent constraints should be only marked as such (<code>false</code>), hence hidden, or cropped (removed) from the model (<code>true</code>) */
public boolean cropRedundantAndInconsistentConstraints;
/** JSON File contining a model (set of constraints) that must not be removed in the pruning */
public File fixpointModel;
public static final ConstraintSortingPolicy[] DEFAULT_PRIORITY_POLICIES = new ConstraintSortingPolicy[] {
ConstraintSortingPolicy.ACTIVATIONTARGETBONDS,
ConstraintSortingPolicy.FAMILYHIERARCHY,
ConstraintSortingPolicy.SUPPORTCONFIDENCEINTERESTFACTOR,
};
public PostProcessingCmdParameters() {
super();
this.sortingPolicies = DEFAULT_PRIORITY_POLICIES;
this.postProcessingAnalysisType = DEFAULT_POST_PROCESSING_ANALYSIS_TYPE;
this.hierarchyPolicy = DEFAULT_HIERARCHY_POLICY;
this.supportThreshold = DEFAULT_SUPPORT_THRESHOLD;
this.confidenceThreshold = DEFAULT_CONFIDENCE_THRESHOLD;
this.interestFactorThreshold = DEFAULT_INTEREST_FACTOR_THRESHOLD;
this.cropRedundantAndInconsistentConstraints = !DEFAULT_REDUNDANT_INCONSISTENT_CONSTRAINTS_KEEPING_POLICY;
}
public static PostProcessingCmdParameters makeParametersForNoPostProcessing() {
PostProcessingCmdParameters noPostProcessParams = new PostProcessingCmdParameters();
noPostProcessParams.postProcessingAnalysisType = PostProcessingAnalysisType.NONE;
noPostProcessParams.hierarchyPolicy = HierarchySubsumptionPruningPolicy.NONE;
noPostProcessParams.supportThreshold = 0.0;
noPostProcessParams.confidenceThreshold = 0.0;
noPostProcessParams.interestFactorThreshold = 0.0;
noPostProcessParams.cropRedundantAndInconsistentConstraints = DEFAULT_REDUNDANT_INCONSISTENT_CONSTRAINTS_KEEPING_POLICY;
return noPostProcessParams;
}
public PostProcessingCmdParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public PostProcessingCmdParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
this.supportThreshold = Double.valueOf(
line.getOptionValue(
SUPPORT_THRESHOLD_PARAM_NAME,
this.supportThreshold.toString()
)
);
this.interestFactorThreshold = Double.valueOf(
line.getOptionValue(
INTEREST_THRESHOLD_PARAM_NAME,
this.interestFactorThreshold.toString()
)
);
this.confidenceThreshold = Double.valueOf(
line.getOptionValue(
CONFIDENCE_THRESHOLD_PARAM_NAME,
this.confidenceThreshold.toString()
)
);
if (line.hasOption(KEEP_CONSTRAINTS_PARAM_NAME)) {
this.cropRedundantAndInconsistentConstraints = false;
} else {
this.cropRedundantAndInconsistentConstraints = !DEFAULT_REDUNDANT_INCONSISTENT_CONSTRAINTS_KEEPING_POLICY;
}
String analysisTypeString = line.getOptionValue(ANALYSIS_TYPE_PARAM_NAME);
if (analysisTypeString != null && !analysisTypeString.isEmpty()) {
try {
this.postProcessingAnalysisType = PostProcessingAnalysisType.valueOf(fromStringToEnumValue(analysisTypeString));
} catch (Exception e) {
System.err.println("Invalid option for " + ANALYSIS_TYPE_PARAM_NAME + ": " + analysisTypeString + ". Using default value.");
}
}
if(line.hasOption(KEEP_MODEL_PARAM_NAME)){
this.fixpointModel =openInputFile(line, KEEP_MODEL_PARAM_NAME);
}
this.updateRankingPolicies(line.getOptionValue(RANKING_POLICY_PARAM_NAME));
}
private void updateRankingPolicies(String paramString) {
String[] tokens = tokenise(paramString);
if (tokens == null)
return;
ArrayList<ConstraintSortingPolicy> listOfPolicies = new ArrayList<ConstraintSortingPolicy>(tokens.length);
ConstraintSortingPolicy policy = null;
for (String token : tokens) {
token = fromStringToEnumValue(token);
try {
policy = ConstraintSortingPolicy.valueOf(token);
} catch (Exception e) {
System.err.println("Invalid option for " + RANKING_POLICY_PARAM_NAME + ": " + token + " is going to be ignored.");
}
listOfPolicies.add(policy);
}
if (listOfPolicies.size() > 0) {
this.sortingPolicies = listOfPolicies.toArray(new ConstraintSortingPolicy[0]);
} else {
System.err.println("No valid option for " + RANKING_POLICY_PARAM_NAME + ". Using default value.");
}
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(ANALYSIS_TYPE_PARAM_NAME)
.hasArg().argName("type")
.longOpt("prune-with")
.desc("type of post-processing analysis over constraints. It can be one of the following: " + printValues(PostProcessingAnalysisType.values())
+ printDefault(fromEnumValueToString(DEFAULT_POST_PROCESSING_ANALYSIS_TYPE)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(RANKING_POLICY_PARAM_NAME)
.hasArg().argName("policy")
.longOpt("prune-ranking-by")
.desc("type of ranking of constraints for post-processing analysis. It can be a " + ARRAY_TOKENISER_SEPARATOR + "-separated list of the following: " + printValues(ConstraintSortingPolicy.values())
+ printDefault(fromEnumValuesToTokenJoinedString(DEFAULT_PRIORITY_POLICIES)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(String.valueOf(SUPPORT_THRESHOLD_PARAM_NAME))
.hasArg().argName("threshold")
.longOpt("support")
.desc("threshold for support (reliability); it must be a real value ranging from 0.0 to 1.0"
+ printDefault(DEFAULT_SUPPORT_THRESHOLD))
.type(Double.class)
.build()
);
options.addOption(
Option.builder(String.valueOf(CONFIDENCE_THRESHOLD_PARAM_NAME))
.hasArg().argName("threshold")
.longOpt("confidence")
.desc("threshold for confidence level (relevance); it must be a real value ranging from 0.0 to 1.0"
+ printDefault(DEFAULT_CONFIDENCE_THRESHOLD))
.type(Double.class)
.build()
);
options.addOption(
Option.builder(String.valueOf(INTEREST_THRESHOLD_PARAM_NAME))
.hasArg().argName("threshold")
.longOpt("interest-factor")
.desc("threshold for interest factor (relevance); it must be a real value ranging from 0.0 to 1.0"
+ printDefault(DEFAULT_INTEREST_FACTOR_THRESHOLD))
.type(Double.class)
.build()
);
options.addOption(
Option.builder(KEEP_CONSTRAINTS_PARAM_NAME)
.longOpt("keep-constraints")
.desc("do not physically remove the redundant or inconsistent constraints from the model")
.type(Boolean.class)
.build()
);
options.addOption(
Option.builder(KEEP_MODEL_PARAM_NAME)
.hasArg().argName("path")
.longOpt("keep-model")
.desc("path to read a file containing the fixpoint model (encoding must be equal to the input model")
.type(String.class)
.build()
);
return options;
}
// TODO Still unused
public static enum HierarchySubsumptionPruningPolicy {
NONE,
HIERARCHY,
SUPPORTHIERARCHY; // default
public SubsumptionHierarchyMarkingPolicy translate() {
switch(this) {
case HIERARCHY:
return SubsumptionHierarchyMarkingPolicy.EAGER_ON_HIERARCHY_OVER_SUPPORT;
case SUPPORTHIERARCHY:
default:
return SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY;
}
}
}
} | 13,113 | 39.350769 | 381 | java |
Janus | Janus-master/src/minerful/postprocessing/pruning/ConflictAndRedundancyResolver.java | package minerful.postprocessing.pruning;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.ListIterator;
import java.util.Set;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import dk.brics.automaton.Automaton;
import dk.brics.automaton.RegExp;
import minerful.automaton.AutomatonFactory;
import minerful.concept.ProcessModel;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily.RelationConstraintSubFamily;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.relation.MutualRelationConstraint;
import minerful.index.LinearConstraintsIndexFactory;
import minerful.index.ModularConstraintsSorter;
import minerful.index.comparator.modular.ConstraintSortingPolicy;
import minerful.postprocessing.params.PostProcessingCmdParameters;
public class ConflictAndRedundancyResolver {
public static final String CONFLICT_REDUNDANCY_CHECK_CODE = "'CR-check'";
public static final int MAXIMUM_VISIBLE_CONSTRAINTS_FOR_REDUNDANCY_CHECK = 24;
private ProcessModel safeProcess;
private ProcessModel originalProcess;
private ConstraintsBag originallHierarchyUnredundantBag;
private boolean checking;
private final boolean avoidingRedundancy;
private final boolean avoidingRedundancyWithDoubleCheck;
private ModularConstraintsSorter sorter;
private SubsumptionHierarchyMarker subsumMarker;
private ProcessModel fixpointModel;
private Automaton safeAutomaton;
private TreeSet<Constraint> blackboard;
private long secondPassStartTime = -1;
private static Logger logger = Logger.getLogger(ConflictAndRedundancyResolver.class.getCanonicalName());
private Set<Constraint>
originalHierarchyUnredundantConstraints,
notSurelySafeProcessConstraints,
conflictingConstraintsInOriginalNonRedundantModel,
conflictingConstraintsInOriginalModel,
conflictingConstraints,
redundantConstraints,
redundantConstraintsAtSecondPass,
redundantConstraintsInOriginalModel;
private int
conflictChecksPerformed,
redundancyChecksPerformed;
private ConstraintSortingPolicy[] rankingPolicies;
	/**
	 * Sets up the resolver for the given process model according to the
	 * post-processing parameters, with no fixpoint model to preserve.
	 *
	 * @param process the process model whose constraints are checked for
	 *        conflicts and redundancies
	 * @param params post-processing options: whether (double-pass) redundancy
	 *        elimination is requested, and the constraint-ranking policies
	 */
	public ConflictAndRedundancyResolver(ProcessModel process, PostProcessingCmdParameters params) {
		this.avoidingRedundancyWithDoubleCheck = params.postProcessingAnalysisType.isRedundancyResolutionDoubleCheckRequested();
		// Double-pass redundancy checking implies single-pass redundancy checking
		this.avoidingRedundancy = this.avoidingRedundancyWithDoubleCheck || params.postProcessingAnalysisType.isRedundancyResolutionRequested();
		this.originalProcess = process;
		this.sorter = new ModularConstraintsSorter();
		this.rankingPolicies = params.sortingPolicies;
		this.subsumMarker = new SubsumptionHierarchyMarker();
		this.subsumMarker.setPolicy(SubsumptionHierarchyMarkingPolicy.CONSERVATIVE);
		this.init();
	}
public ConflictAndRedundancyResolver(ProcessModel process, PostProcessingCmdParameters params, ProcessModel fixpointModel) {
this.avoidingRedundancyWithDoubleCheck = params.postProcessingAnalysisType.isRedundancyResolutionDoubleCheckRequested();
this.avoidingRedundancy = this.avoidingRedundancyWithDoubleCheck || params.postProcessingAnalysisType.isRedundancyResolutionRequested();
this.originalProcess = process;
this.sorter = new ModularConstraintsSorter();
this.rankingPolicies = params.sortingPolicies;
this.subsumMarker = new SubsumptionHierarchyMarker();
this.subsumMarker.setPolicy(SubsumptionHierarchyMarkingPolicy.CONSERVATIVE);
this.fixpointModel = fixpointModel;
this.init();
}
/**
 * Resets all counters and result sets, strips subsumption-redundant constraints from a clone
 * of the original bag, and bootstraps the safe process/automaton from either the fixpoint
 * model (if given) or the fully-supported constraints.
 * Statement order matters: the subsumption marker must run before the bag is cloned and pruned.
 */
public void init() {
	this.checking = false;
	this.conflictChecksPerformed = 0;
	this.redundancyChecksPerformed = 0;
	this.conflictingConstraints = new TreeSet<Constraint>();
	this.redundantConstraints = new TreeSet<Constraint>();
	this.redundantConstraintsAtSecondPass = new TreeSet<Constraint>();
	// Pre-processing: mark subsumption-redundant constraints
	this.subsumMarker.setConstraintsBag(this.originalProcess.bag);
	this.subsumMarker.markSubsumptionRedundantConstraints();
	// Create a copy of the original bag where subsumption-redundant constraints are removed
	this.originallHierarchyUnredundantBag = (ConstraintsBag) this.originalProcess.bag.clone();
	this.originallHierarchyUnredundantBag.removeMarkedConstraints();
	this.originalHierarchyUnredundantConstraints = this.originallHierarchyUnredundantBag.getAllConstraints();
	/*
	 * The blackboard is meant to associate to all constraints a tick,
	 * whenever the constraint has already been checked
	 */
	this.sorter.setConstraints(originalHierarchyUnredundantConstraints);
	// Blackboard ordering follows the sorter comparator so it is consistent with the ranking policies.
	this.blackboard = new TreeSet<Constraint>(this.sorter.getComparator());
	ConstraintsBag safeBag;
	// A given fixpoint model overrides the default "fully supported constraints are safe" seed.
	if (fixpointModel != null) {
		safeBag = fixpointModel.bag;
	} else {
		safeBag = this.originallHierarchyUnredundantBag.getOnlyFullySupportedConstraintsInNewBag();
	}
	Collection<Constraint> safeConstraints = safeBag.getAllConstraints();
	this.sorter.setConstraints(safeConstraints);
	/*
	 * Step 1: Consider as safe those constraints that have a support
	 * of 100%: if they have a support of 100%, a model already exists for
	 * them: the log itself. So, their conjunction cannot be unsatisfiable.
	 */
	if (avoidingRedundancy) {
		// Rebuild the safe bag constraint by constraint, dropping every redundant one on the way.
		logger.info("Checking redundancies of fully-supported constraints...");
		ConstraintsBag emptyBag = this.originallHierarchyUnredundantBag.createEmptyIndexedCopy();
		this.safeProcess = new ProcessModel(this.originalProcess.getTaskCharArchive(), emptyBag);
		Automaton candidateAutomaton = null;
		// Start from the automaton accepting any word over the alphabet: no constraint yet.
		this.safeAutomaton = this.safeProcess.buildAlphabetAcceptingAutomaton();
		for (Constraint candidateCon : this.sorter.sort(this.rankingPolicies)) {
			logger.trace("Checking redundancy of " + candidateCon);
			candidateAutomaton = new RegExp(candidateCon.getRegularExpression()).toAutomaton();
			if (!candidateCon.isRedundant() // If this constraint was not already found to be redundant in some way before
				&& !this.isConstraintAlreadyChecked(candidateCon) // If this constraint was not already checked
				&& this.checkRedundancy(this.safeAutomaton, this.safeProcess.bag, candidateAutomaton, candidateCon)
				// and the check of redundancy has a negative response (namely, it is not redundant)
				) {
				// Keep the constraint: conjoin its language into the safe automaton.
				this.safeAutomaton = this.intersect(this.safeAutomaton, candidateAutomaton);
				this.safeProcess.bag.add(candidateCon.getBase(), candidateCon);
			}
			blackboard.add(candidateCon);
		}
	} else {
		// No redundancy filtering: accept the whole safe bag as-is.
		this.safeProcess = new ProcessModel(this.originalProcess.getTaskCharArchive(), safeBag);
		this.safeAutomaton = this.safeProcess.buildAutomaton();
		for (Constraint c : LinearConstraintsIndexFactory.getAllConstraints(safeBag)) {
			//System.out.println("PRESENTATION -- The safe constraint: " + c + " supp: " + c.support + "; conf: " + c.confidence + "; inf.f: " + c.interestFactor + " rex: " + c.getRegularExpression());
			//System.out.println("PRESENTATION -- The safe constraint automaton: " + c + " \n" + safeProcess.buildAlphabetAcceptingAutomaton().intersection(new RegExp(c.getRegularExpression()).toAutomaton()).toDot());
			blackboard.add(c);
		}
	}
	//System.out.println("PRESENTATION -- The safe automaton:\n" + safeAutomaton.toDot());
	// Everything below full support still needs to be vetted for conflicts/redundancies.
	ConstraintsBag unsafeBag = this.originallHierarchyUnredundantBag.createComplementOfCopyPrunedByThreshold(Constraint.MAX_SUPPORT);
	// for (Constraint c : LinearConstraintsIndexFactory.getAllConstraints(unsafeBag)) {
	// blackboard.add(c);
	// }
	this.sorter.setConstraints(unsafeBag.getAllConstraints());
	this.notSurelySafeProcessConstraints = this.sorter.sort(this.rankingPolicies);
}
/**
 * Main resolution pass: visits every not-surely-safe constraint in ranking order, drops
 * redundant ones (when requested), and recursively relaxes conflicting ones. Optionally runs
 * a second redundancy pass, then re-marks subsumption redundancies on the resulting bag.
 *
 * @return the safe process model built so far (also kept in {@code this.safeProcess})
 */
public ProcessModel resolveConflictsOrRedundancies() {
	logger.info("Checking redundancies and conflicts of non-fully-supported constraints");
	this.checking = true;
	Automaton candidateAutomaton = null;
	int nConstraintsToCheck = this.notSurelySafeProcessConstraints.size();
	int i = 1;
	for (Constraint candidateCon : this.notSurelySafeProcessConstraints) {
		logger.trace(i + "/" + nConstraintsToCheck + " Checking " + candidateCon.toString() + " for redundancies or conflicts...");
		// A constraint may have been put on the blackboard by a previous recursive relaxation.
		if (!isConstraintAlreadyChecked(candidateCon)) {
			logger.trace("Checking consistency of " + candidateCon);
			//System.out.println("PRESENTATION -- The unsafe constraint: " + candidateCon + " supp: " + candidateCon.support + "; conf: " + candidateCon.confidence + "; inf.f: " + candidateCon.interestFactor);
			//System.out.println("PRESENTATION -- The unsafe constraint automaton: " + candidateCon + " \n" + safeProcess.buildAlphabetAcceptingAutomaton().intersection(new RegExp(candidateCon.getRegularExpression()).toAutomaton()).toDot());
			candidateAutomaton = new RegExp(candidateCon.getRegularExpression()).toAutomaton();
			// Redundant constraints are skipped entirely; non-redundant ones go to conflict resolution.
			if (!this.avoidingRedundancy || this.checkRedundancy(candidateAutomaton, candidateCon))
				resolveConflictsRecursively(candidateAutomaton, candidateCon);
		}
		i++;
	}
	// safeProcess.bag = safeProcess.bag.markSubsumptionRedundantConstraints();
	// safeProcess.bag.removeMarkedConstraints();
	if (this.avoidingRedundancyWithDoubleCheck) {
		logger.info("Checking redundant constraints in a second pass...");
		this.doubleCheckRedundancies();
	}
	// Final hierarchy-based pruning markers on the surviving constraints.
	this.subsumMarker.setConstraintsBag(this.safeProcess.bag);
	this.subsumMarker.markSubsumptionRedundantConstraints();
	this.checking = false;
	return this.safeProcess;
}
/**
 * Second redundancy pass: walks the safe constraints in reverse ranking order and removes any
 * constraint whose language is already implied by the conjunction of all the others (a later
 * addition may have made an earlier one redundant).
 */
private void doubleCheckRedundancies() {
	this.secondPassStartTime = System.currentTimeMillis();
	// With zero or one constraint there is nothing that could be mutually redundant.
	if (this.safeProcess.howManyConstraints() > 1) {
		sorter.setConstraints(this.safeProcess.getAllConstraints());
		// Let us take ALL constraints of the safe process
		ArrayList<Constraint> constraintsSortedForDoubleCheck = new ArrayList<Constraint>(sorter.sort(this.rankingPolicies));
		// Let us visit them in the reverse order with which they were added -- so as to be consistent with the given ranking policy
		ListIterator<Constraint> iterator =
			constraintsSortedForDoubleCheck.listIterator(
				constraintsSortedForDoubleCheck.size()
				// The last one is the constraint that we checked last. In theory, it should not constitute a problem
				// NOTE(review): listIterator(size - 2).previous() returns the element at index size - 3,
				// so the last TWO constraints are never re-checked here, while the comment above speaks
				// of skipping only one. Looks like an off-by-one (size - 1 would skip just the last) -- confirm.
				- 2);
		Constraint candidateCon = null;
		Automaton secondPassGridCheckAutomaton = null;
		while (iterator.hasPrevious()) {
			candidateCon = iterator.previous();
			logger.trace("Second-pass grid check of constraint: " + candidateCon);
			// Automaton of all safe constraints EXCEPT the candidate.
			secondPassGridCheckAutomaton =
				AutomatonFactory.buildAutomaton(
					this.safeProcess.bag,
					this.safeProcess.getTaskCharArchive().getIdentifiersAlphabet(),
					candidateCon);
			// If the safe automaton accepts
			if (secondPassGridCheckAutomaton.subsetOf(
				// ... all the constraints BUT the current one...
				// this.safeAutomaton.minus(
				// ... accepts a subset of the languages that the current one accepts...
				// new RegExp(candidateCon.getRegularExpression()).toAutomaton()))) {
				this.safeAutomaton)) {
				// ... then the current constraint is basically useless. Explanation is: some other constraint had been added later that made an already saved constraint redundant.
				this.safeProcess.bag.remove(candidateCon);
				this.redundantConstraintsAtSecondPass.add(candidateCon);
				this.redundantConstraints.add(candidateCon);
				candidateCon.setRedundant(true);
				logger.warn(candidateCon + " is redundant (second-pass grid check)");
			}
			redundancyChecksPerformed++;
		}
	}
}
/**
 * Checks whether conjoining {@code candidateCon} with the current safe automaton yields an
 * empty language. If so, the constraint conflicts: it is flagged, and the method recurses on
 * a relaxation (the constraint it is based upon, or a suggested one) and — for coupling
 * constraints — on the forward/backward halves. Otherwise the constraint is accepted into the
 * safe process (unless a fixpoint model was given, in which case the safe model is fixed).
 *
 * @param candidateAutomaton automaton of {@code candidateCon}'s regular expression
 * @param candidateCon the constraint under examination
 */
public void resolveConflictsRecursively(Automaton candidateAutomaton, Constraint candidateCon) {
	if (isConstraintAlreadyChecked(candidateCon)) {
		logger.trace(candidateCon + " was already checked");
		return;
	} else {
		conflictChecksPerformed++;
		blackboard.add(candidateCon);
	}
	logger.trace("Checking conflict with " + candidateCon + ": Conjuncting the safe automaton with Reg.exp: " + candidateCon.getRegularExpression());
	Automaton auxAutomaton = this.intersect(this.safeAutomaton, candidateAutomaton);
	Constraint
	relaxedCon = null;
	// Empty (or empty-string-only) intersection means no trace can satisfy all constraints together.
	if (isAutomatonEmpty(auxAutomaton)) {
		logger.warn(candidateCon
			+ " conflicts with the existing safe automaton!");
		// logger.warn("Current set of safe constraints: " + this.safeProcess.bag);
		conflictingConstraints.add(candidateCon);
		candidateCon.setConflicting(true);
		// Try to salvage information by relaxing to a weaker constraint in the hierarchy.
		relaxedCon = candidateCon.getConstraintWhichThisIsBasedUpon();
		if (relaxedCon == null) {
			relaxedCon = candidateCon.suggestConstraintWhichThisShouldBeBasedUpon();
			if (relaxedCon != null) {
				relaxedCon = candidateCon.createConstraintWhichThisShouldBeBasedUpon();
				logger.trace(relaxedCon + " included in process model as relaxation, replacing " + candidateCon);
			}
		}
		if (relaxedCon == null || relaxedCon == candidateCon) {
			// No weaker form exists: the constraint is simply dropped.
			logger.warn(candidateCon + " has to be removed at once");
		} else {
			logger.trace(candidateCon + " relaxed to " + relaxedCon);
			resolveConflictsRecursively(new RegExp(relaxedCon.getRegularExpression()).toAutomaton(), relaxedCon);
		}
		// A conflicting coupling constraint may still be partly salvageable via its two halves.
		if (candidateCon.getSubFamily().equals(RelationConstraintSubFamily.COUPLING)) {
			MutualRelationConstraint coCandidateCon = (MutualRelationConstraint) candidateCon;
			Constraint
			forwardCon = coCandidateCon.getForwardConstraint(),
			backwardCon = coCandidateCon.getBackwardConstraint();
			if (forwardCon != null && backwardCon != null) {
				logger.trace("Splitting the coupling relation constraint "
					+ coCandidateCon + " into "
					+ coCandidateCon.getForwardConstraint() + " and "
					+ coCandidateCon.getBackwardConstraint());
				this.resolveConflictsRecursively(
					new RegExp(forwardCon.getRegularExpression()).toAutomaton(),
					forwardCon);
				this.resolveConflictsRecursively(
					new RegExp(backwardCon.getRegularExpression()).toAutomaton(),
					backwardCon);
			}
		}
	} else {
		// No conflict: accept the constraint, unless the safe model is pinned to a fixpoint model.
		if (fixpointModel == null) {
			safeAutomaton = auxAutomaton;
			//System.out.println("PRESENTATION -- Safe automaton so far: " + safeAutomaton.toDot());
			safeProcess.bag.add(candidateCon.getBase(), candidateCon);
		}
	}
}
/**
 * Conjoins the two automata: the result accepts exactly the traces accepted by both.
 * Logs the size of the product automaton for diagnostics.
 */
private Automaton intersect(Automaton automaton, Automaton candidateAutomaton) {
	Automaton conjunction = automaton.intersection(candidateAutomaton);
	logger.trace("Automaton states: " + conjunction.getNumberOfStates() + "; transitions: " + conjunction.getNumberOfTransitions());
	return conjunction;
}
/**
 * Convenience overload: checks redundancy against the current safe automaton and safe bag.
 *
 * @return {@code true} when the candidate is NOT redundant (i.e., it adds information)
 */
private boolean checkRedundancy(Automaton candidateAutomaton, Constraint candidateCon) {
	return checkRedundancy(this.safeAutomaton, this.safeProcess.bag, candidateAutomaton, candidateCon);
}
/**
 * Language-inclusion redundancy test. A candidate is redundant when the safe automaton's
 * language is already a subset of the candidate's language (the candidate forbids nothing new).
 * Side effects on a redundant candidate: it is added to {@code redundantConstraints} and
 * flagged via {@code setRedundant(true)}.
 *
 * @return {@code true} when the candidate is NOT redundant and may be kept
 */
private boolean checkRedundancy(Automaton safeAutomaton, ConstraintsBag safeBag, Automaton candidateAutomaton, Constraint candidateCon) {
	redundancyChecksPerformed++;
	logger.trace("Checking redundancy of " + candidateCon);
	boolean implied = safeAutomaton.subsetOf(candidateAutomaton);
	if (!implied) {
		// Not a superset language: the candidate genuinely restricts the model.
		return true;
	}
	// List the implying constraints only while the bag is small enough to keep the log readable.
	String impliedBy =
		(safeBag.howManyConstraints() < ConflictAndRedundancyResolver.MAXIMUM_VISIBLE_CONSTRAINTS_FOR_REDUNDANCY_CHECK)
			? " by " + LinearConstraintsIndexFactory.getAllConstraints(safeBag)
			: " by the current set of constraints.";
	logger.warn(candidateCon + " is redundant. It is already implied" + impliedBy);
	this.redundantConstraints.add(candidateCon);
	candidateCon.setRedundant(true);
	return false;
}
/** A constraint sitting on the blackboard has already been examined in this run. */
private boolean isConstraintAlreadyChecked(Constraint candidateCon) {
	return blackboard.contains(candidateCon);
}
/** An automaton accepting no trace at all, or only the empty trace, denotes a dead model. */
private boolean isAutomatonEmpty(Automaton automaton) {
	if (automaton.isEmpty()) {
		return true;
	}
	return automaton.isEmptyString();
}
/** @return constraints flagged as conflicting during resolution (live view, not a copy). */
public Set<Constraint> getIdentifiedConflictingConstraints() {
	return conflictingConstraints;
}
/** @return constraints flagged as redundant during resolution (live view, not a copy). */
public Set<Constraint> getIdentifiedRedundantConstraints() {
	return redundantConstraints;
}
/** @return constraints found redundant only during the second-pass grid check. */
public Set<Constraint> getIdentifiedRedundantConstraintsDuringSecondPass() {
	return redundantConstraintsAtSecondPass;
}
/**
 * Lazily computes the intersection between the hierarchy-unredundant input constraints and the
 * conflicting constraints found during resolution. Result is cached across calls.
 *
 * @throws IllegalStateException while a check is running, or before any check was performed
 */
public Set<Constraint> getConflictingConstraintsInOriginalUnredundantModel() {
	if (checking == true) {
		throw new IllegalStateException("Check in progress");
	}
	if (conflictingConstraintsInOriginalNonRedundantModel == null) {
		if (conflictingConstraints != null) {
			conflictingConstraintsInOriginalNonRedundantModel = new TreeSet<Constraint>();
			conflictingConstraintsInOriginalNonRedundantModel.addAll(this.originalHierarchyUnredundantConstraints);
			// retainAll against a TreeSet copy so membership follows the constraints' natural ordering.
			conflictingConstraintsInOriginalNonRedundantModel.retainAll(new TreeSet<Constraint>(conflictingConstraints));
		} else {
			throw new IllegalStateException("Conflict check not yet performed");
		}
	}
	return conflictingConstraintsInOriginalNonRedundantModel;
}
/**
 * Lazily computes the intersection between ALL input-model constraints and the conflicting
 * constraints found during resolution. Result is cached across calls.
 *
 * @throws IllegalStateException while a check is running, or before any check was performed
 */
public Set<Constraint> getConflictingConstraintsInOriginalModel() {
	if (checking) {
		throw new IllegalStateException("Check in progress");
	}
	if (conflictingConstraintsInOriginalModel != null) {
		return conflictingConstraintsInOriginalModel;
	}
	if (conflictingConstraints == null) {
		throw new IllegalStateException("Conflict check not yet performed");
	}
	conflictingConstraintsInOriginalModel = new TreeSet<Constraint>();
	conflictingConstraintsInOriginalModel.addAll(this.originalProcess.bag.getAllConstraints());
	// retainAll against a TreeSet copy so membership follows the constraints' natural ordering.
	conflictingConstraintsInOriginalModel.retainAll(new TreeSet<Constraint>(conflictingConstraints));
	return conflictingConstraintsInOriginalModel;
}
/**
 * Lazily computes the intersection between ALL input-model constraints and the redundant
 * constraints found during resolution. Result is cached across calls.
 *
 * @throws IllegalStateException while a check is running, or before any check was performed
 */
public Set<Constraint> getRedundantConstraintsInOriginalModel() {
	if (checking == true) {
		throw new IllegalStateException("Check in progress");
	}
	if (redundantConstraintsInOriginalModel == null) {
		if (redundantConstraints != null) {
			redundantConstraintsInOriginalModel = new TreeSet<Constraint>();
			redundantConstraintsInOriginalModel.addAll(this.originalProcess.bag.getAllConstraints());
			redundantConstraintsInOriginalModel.retainAll(redundantConstraints);
		} else {
			throw new IllegalStateException("Conflict check not yet performed");
		}
	}
	return redundantConstraintsInOriginalModel;
}
/**
 * Returns the redundant constraints restricted to the hierarchy-UNREDUNDANT input model.
 * <p>
 * Bug fix: the previous body was a verbatim copy of
 * {@link #getRedundantConstraintsInOriginalModel()} — it filtered against the FULL original
 * bag and shared that method's cache field, so the two getters always returned the same set
 * (and the CSV summary printed identical numbers under two different labels). This version
 * mirrors {@code getConflictingConstraintsInOriginalUnredundantModel()} by intersecting with
 * {@code originalHierarchyUnredundantConstraints}; it computes the set afresh on each call so
 * as not to corrupt the sibling getter's cache.
 *
 * @throws IllegalStateException while a check is running, or before any check was performed
 */
public Set<Constraint> getRedundantConstraintsInOriginalUnredundantModel() {
	if (checking == true) {
		throw new IllegalStateException("Check in progress");
	}
	if (redundantConstraints == null) {
		throw new IllegalStateException("Conflict check not yet performed");
	}
	Set<Constraint> redundantInUnredundantModel = new TreeSet<Constraint>();
	redundantInUnredundantModel.addAll(this.originalHierarchyUnredundantConstraints);
	redundantInUnredundantModel.retainAll(redundantConstraints);
	return redundantInUnredundantModel;
}
/** @return the conflict- and redundancy-free process model built by this resolver. */
public ProcessModel getSafeProcess() {
	return this.safeProcess;
}
/** @return how many constraints the input model contained. */
public int howManyInputConstraints() {
	return originalProcess.bag.howManyConstraints();
}
/** @return how many input constraints survived the subsumption-hierarchy pre-pruning. */
public int howManyInputUnredundantConstraints() {
	return originalHierarchyUnredundantConstraints.size();
}
/**
 * @return the number of automaton-based conflict checks performed
 * @throws IllegalStateException while a check is still running
 */
public int howManyPerformedConflictChecks() {
	if (checking) {
		throw new IllegalStateException("Conflict check in progress");
	}
	return conflictChecksPerformed;
}
/**
 * @return the number of automaton-based redundancy checks performed
 * @throws IllegalStateException while a check is still running
 */
public int howManyPerformedRedundancyChecks() {
	if (checking) {
		// Message text kept verbatim from the original implementation.
		throw new IllegalStateException("Conflict check in progress");
	}
	return redundancyChecksPerformed;
}
/**
 * Logs a two-line CSV summary (legend line + data line) of the whole resolution run:
 * input sizes, check counts, conflict/redundancy findings, and timings.
 * Output is byte-identical to the previous hand-unrolled version; the repeated
 * value/label/separator boilerplate is factored into {@link #appendCsvField}.
 *
 * @param startTime wall-clock millis at which resolution started
 * @param finishTime wall-clock millis at which resolution finished
 */
public void printComputationStats(long startTime, long finishTime) {
	StringBuffer csvSummaryBuffer = new StringBuffer();
	StringBuffer csvSummaryLegendBuffer = new StringBuffer();
	StringBuffer csvSummaryComprehensiveBuffer = new StringBuffer();
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, ConflictAndRedundancyResolver.CONFLICT_REDUNDANCY_CHECK_CODE, "'Operation code'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.howManyInputConstraints(), "'Input constraints'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.howManyInputUnredundantConstraints(), "'Input constraints from hierarchy-unredundant model'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.howManyPerformedConflictChecks(), "'Performed conflict checks'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getIdentifiedConflictingConstraints().size(), "'Identified conflicting constraints'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getConflictingConstraintsInOriginalModel().size(), "'Conflicting constraints in original model'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getConflictingConstraintsInOriginalUnredundantModel().size(), "'Conflicting constraints in original hierarchy-unredundant model'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.howManyPerformedRedundancyChecks(), "'Performed redundancy checks'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getIdentifiedRedundantConstraints().size(), "'Identified redundant constraints'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getIdentifiedRedundantConstraintsDuringSecondPass().size(), "'Identified redundant constraints in second pass'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getRedundantConstraintsInOriginalModel().size(), "'Redundant constraints in original model'");
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, this.getRedundantConstraintsInOriginalUnredundantModel().size(), "'Redundant constraints in original hierarchy-unredundant model'");
	// Second-pass duration is 0 when that pass never ran (secondPassStartTime stays -1).
	appendCsvField(csvSummaryBuffer, csvSummaryLegendBuffer, secondPassStartTime > 0 ? (finishTime - secondPassStartTime) : 0, "'Time for second redundancy check round'");
	// Last field: no trailing separator.
	csvSummaryBuffer.append(finishTime - startTime);
	csvSummaryLegendBuffer.append("'Total time to resolve conflicts and redundancies'");
	csvSummaryComprehensiveBuffer.append("\n\nConflict resolution: \n");
	csvSummaryComprehensiveBuffer.append(csvSummaryLegendBuffer.toString());
	csvSummaryComprehensiveBuffer.append("\n");
	csvSummaryComprehensiveBuffer.append(csvSummaryBuffer.toString());
	logger.info(csvSummaryComprehensiveBuffer.toString());
}
/**
 * Appends one CSV field: the value to the data buffer, its label to the legend buffer,
 * then a ';' separator to both.
 */
private static void appendCsvField(StringBuffer dataBuffer, StringBuffer legendBuffer, Object value, String label) {
	dataBuffer.append(value);
	legendBuffer.append(label);
	dataBuffer.append(";");
	legendBuffer.append(";");
}
}
package minerful.postprocessing.pruning;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.TreeMap;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily.RelationConstraintSubFamily;
import minerful.concept.constraint.relation.MutualRelationConstraint;
import minerful.concept.constraint.relation.RelationConstraint;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.Equalisation;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.Extension;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.None;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.Restriction;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.SubsumptionKind;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.SubsumptionKindClassComparator;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker.Subsumption.SubsumptionKindComparator;
public class SubsumptionCheckSummaryMaker {
/**
 * Immutable record of a subsumption check outcome: the model constraint involved, together
 * with the kind of relation found between it and the checked constraint.
 */
public static class Subsumption {
	/** Common marker for all subsumption-relation kinds (enum constants below). */
	public static interface SubsumptionKind {
		String getKind();
	}
	/** Outcome: no subsumption relation was found. */
	public static enum None implements SubsumptionKind {
		NONE;
		@Override
		public String getKind() {
			return this.toString();
		}
	}
	/** Outcome: the checked constraint equals the model constraint. */
	public static enum Equalisation implements SubsumptionKind {
		EQUAL_TO;
		@Override
		public String getKind() {
			return this.toString();
		}
	}
	/** Outcomes in which the checked constraint is a restriction (stricter form). */
	public static enum Restriction implements SubsumptionKind {
		DIRECT_CHILD_OF,
		DESCENDANT_OF,
		INCLUDES_AS_FORWARD,
		INCLUDES_AS_BACKWARD,
		SAME_TEMPLATE_SAME_ACTIVATION_TARGET_INCLUDED_IN,
		// Both-ways (hierarchy and set-inclusion) relaxation
		TEMPLATE_DESCENDANT_OF_SAME_ACTIVATION_TARGET_INCLUDED_IN;
		@Override
		public String getKind() {
			return this.toString();
		}
	}
	/** Outcomes in which the checked constraint is an extension (weaker form). */
	public static enum Extension implements SubsumptionKind {
		DIRECT_PARENT_OF,
		ANCESTOR_OF,
		IS_FORWARD_OF,
		IS_BACKWARD_OF,
		SAME_TEMPLATE_SAME_ACTIVATION_TARGET_INCLUDES,
		// Both-ways (hierarchy and set-inclusion) restriction
		TEMPLATE_ANCESTOR_OF_SAME_ACTIVATION_TARGET_INCLUDES;
		@Override
		public String getKind() {
			return this.toString();
		}
	}
	// The model constraint against which the relation was established.
	public final Constraint constraint;
	// The kind of relation found.
	public final SubsumptionKind kind;
	public Subsumption(Constraint constraint,
		SubsumptionKind subsumptionKind) {
		this.constraint = constraint;
		this.kind = subsumptionKind;
	}
	@Override
	public String toString() {
		// Rendered as KIND(constraint), e.g. "EQUAL_TO(Response(a, b))".
		StringBuilder builder = new StringBuilder();
		builder.append(kind);
		builder.append('(');
		builder.append(constraint);
		builder.append(")");
		return builder.toString();
	}
	/** Orders kind classes (Equalisation/Extension/None/Restriction) by class name. */
	public static class SubsumptionKindClassComparator implements Comparator<Class<? extends SubsumptionKind>> {
		@Override
		public int compare(Class<? extends SubsumptionKind> o1, Class<? extends SubsumptionKind> o2) {
			return o1.getName().compareTo(o2.getName());
		}
	}
	/** Orders individual kinds lexicographically by their name. */
	public static class SubsumptionKindComparator implements Comparator<SubsumptionKind> {
		@Override
		public int compare(SubsumptionKind o1, SubsumptionKind o2) {
			return o1.getKind().compareTo(o2.getKind());
		}
	}
}
// Shared, stateless comparators for the two counter maps below.
private static final SubsumptionKindClassComparator cla_compa = new SubsumptionKindClassComparator();
private static final SubsumptionKindComparator kin_compa = new SubsumptionKindComparator();
// Per-kind counters (e.g. EQUAL_TO, DIRECT_CHILD_OF, NONE).
private NavigableMap<SubsumptionKind, Integer> checks;
// Per-category counters (Equalisation / Restriction / Extension / None).
private Map<Class<? extends Subsumption.SubsumptionKind>, Integer> checksSummary;
// The reference model against which constraints are checked.
private Constraint[] model;
/** Builds a summary maker over the given reference model (array is kept, not copied). */
public SubsumptionCheckSummaryMaker(Constraint[] model) {
	this.initCounters();
	this.model = model;
}
/** Builds a summary maker over the given reference model constraints. */
public SubsumptionCheckSummaryMaker(Collection<Constraint> allConstraints) {
	this.initCounters();
	this.model = allConstraints.toArray(new Constraint[allConstraints.size()]);
}
/**
 * (Re)creates both counter maps and zero-initialises an entry for every subsumption kind
 * and every kind category. The four identical register loops of the previous version are
 * factored into {@link #registerKinds}; resulting map contents are unchanged.
 */
private void initCounters() {
	checks = new TreeMap<SubsumptionKind, Integer>(kin_compa);
	checksSummary = new TreeMap<Class<? extends Subsumption.SubsumptionKind>, Integer>(cla_compa);
	// Initialisation
	registerKinds(Equalisation.class, Subsumption.Equalisation.values());
	registerKinds(Restriction.class, Subsumption.Restriction.values());
	registerKinds(Extension.class, Subsumption.Extension.values());
	registerKinds(None.class, Subsumption.None.values());
}
/** Zero-initialises the per-kind counters for one category and the category counter itself. */
private void registerKinds(Class<? extends Subsumption.SubsumptionKind> kindClass, SubsumptionKind[] kinds) {
	for (SubsumptionKind kind : kinds) {
		checks.put(kind, 0);
	}
	checksSummary.put(kindClass, 0);
}
/** Resets all counters to zero (fresh maps). */
public void resetCounters() {
	this.initCounters();
}
/**
 * Checks every constraint of the given array against the model, updating the counters.
 *
 * @return one outcome per input constraint, positionally aligned ({@code null} where no
 *         subsumption relation was found)
 */
public Subsumption[] checkSubsumption(Constraint[] constraintsToBeChecked) {
	Subsumption[] outcomes = new Subsumption[constraintsToBeChecked.length];
	int idx = 0;
	for (Constraint toCheck : constraintsToBeChecked) {
		outcomes[idx++] = this.checkSubsumption(toCheck);
	}
	return outcomes;
}
/**
 * Scans the model for the first constraint standing in a subsumption relation with {@code c}
 * and categorises the outcome into the counters.
 * The else-if chain is ordered from the strongest relation (equality) to weaker ones, and the
 * scan stops at the first match.
 * NOTE(review): kind names seem to read as "<c> KIND <model constraint>" (e.g. modelCon child
 * of c yields DIRECT_PARENT_OF) — confirm the intended direction before relying on it.
 *
 * @param c the constraint to check
 * @return the first subsumption found, or {@code null} when none applies
 */
public Subsumption checkSubsumption(Constraint c) {
	Subsumption subsumption = null;
	Constraint modelCon = null;
	// Stop scanning as soon as one relation is established.
	for (int i = 0; i < model.length && subsumption == null; i++) {
		modelCon = model[i];
		if (modelCon.equals(c)) {
			subsumption = new Subsumption(modelCon, Equalisation.EQUAL_TO);
		} else if (modelCon.isChildOf(c)) {
			subsumption = new Subsumption(modelCon, Extension.DIRECT_PARENT_OF);
		} else if (c.isChildOf(modelCon)) {
			subsumption = new Subsumption(modelCon, Restriction.DIRECT_CHILD_OF);
		} else if (modelCon.isDescendantAlongSameBranchOf(c)) {
			subsumption = new Subsumption(modelCon, Extension.ANCESTOR_OF);
		} else if (c.isDescendantAlongSameBranchOf(modelCon)) {
			subsumption = new Subsumption(modelCon, Restriction.DESCENDANT_OF);
		} else if (c.getSubFamily().equals(RelationConstraintSubFamily.COUPLING)) {
			// The checked constraint is a coupling: see whether the model holds one of its halves.
			MutualRelationConstraint coCon = ((MutualRelationConstraint)c);
			if (coCon.getPossibleForwardConstraint().equals(modelCon)) {
				subsumption = new Subsumption(modelCon, Restriction.INCLUDES_AS_FORWARD);
			}
			// NOTE(review): not an else-if — a match here overwrites a forward match just above; confirm.
			if (coCon.getPossibleBackwardConstraint().equals(modelCon)) {
				subsumption = new Subsumption(modelCon, Restriction.INCLUDES_AS_BACKWARD);
			}
		} else if (modelCon.getSubFamily().equals(RelationConstraintSubFamily.COUPLING)) {
			// The model constraint is a coupling: see whether c is one of its halves.
			MutualRelationConstraint coCheckCon = ((MutualRelationConstraint)modelCon);
			if (coCheckCon.getPossibleForwardConstraint().equals(c)) {
				subsumption = new Subsumption(modelCon, Extension.IS_FORWARD_OF);
			}
			if (coCheckCon.getPossibleBackwardConstraint().equals(c)) {
				subsumption = new Subsumption(modelCon, Extension.IS_BACKWARD_OF);
			}
		} else if (modelCon.isBranched() || c.isBranched()) {
			// Branched relation constraints: compare templates and target-set inclusion.
			if (modelCon instanceof RelationConstraint && c instanceof RelationConstraint) {
				RelationConstraint reModelCon = ((RelationConstraint)modelCon);
				RelationConstraint reC = ((RelationConstraint)c);
				// Only comparable when both share the same activation (base).
				if (reModelCon.getBase().equals(reC.getBase())) {
					if (reModelCon.type.equals(reC.type)) {
						// Same template: subsumption decided by target-set inclusion alone.
						if (reModelCon.hasTargetSetStrictlyIncludingTheOneOf(reC)) {
							subsumption = new Subsumption(modelCon, Restriction.SAME_TEMPLATE_SAME_ACTIVATION_TARGET_INCLUDED_IN);
						} else if (reC.hasTargetSetStrictlyIncludingTheOneOf(reModelCon)) {
							subsumption = new Subsumption(modelCon, Extension.SAME_TEMPLATE_SAME_ACTIVATION_TARGET_INCLUDES);
						}
					} else if (reModelCon.isTemplateDescendantAlongSameBranchOf(reC)) {
						// Template hierarchy and target inclusion must agree for a both-ways relation.
						if (reC.hasTargetSetStrictlyIncludingTheOneOf(reModelCon)) {
							subsumption = new Subsumption(modelCon, Extension.TEMPLATE_ANCESTOR_OF_SAME_ACTIVATION_TARGET_INCLUDES);
						}
					} else if (reC.isTemplateDescendantAlongSameBranchOf(reModelCon)) {
						if (reModelCon.hasTargetSetStrictlyIncludingTheOneOf(reC)) {
							subsumption = new Subsumption(modelCon, Restriction.TEMPLATE_DESCENDANT_OF_SAME_ACTIVATION_TARGET_INCLUDED_IN);
						}
					}
				}
			}
		}
	}
	// Count the outcome (including the "no relation" case).
	this.categoriseSubsumption(subsumption);
	return subsumption;
}
/**
 * Increments the per-kind and per-category counters for one check outcome.
 * A {@code null} outcome is counted under {@code None.NONE}.
 */
public void categoriseSubsumption(Subsumption auXub) {
	SubsumptionKind kind;
	Class<? extends SubsumptionKind> kindClass;
	if (auXub == null) {
		kind = Subsumption.None.NONE;
		kindClass = Subsumption.None.class;
	} else {
		kind = auXub.kind;
		kindClass = auXub.kind.getClass();
	}
	Integer kindCount = checks.get(kind);
	checks.put(kind, kindCount + 1);
	Integer categoryCount = checksSummary.get(kindClass);
	checksSummary.put(kindClass, categoryCount + 1);
}
/**
 * Builds the CSV header line: "Code", then one column per subsumption kind, then one column
 * per kind category (simple class name, i.e. the part after the last '$').
 * Column order follows the sorted iteration order of the two counter maps, matching
 * {@link #csvContent()}.
 */
public String csvLegend() {
	StringBuilder legend = new StringBuilder("Code");
	for (SubsumptionKind kind : checks.keySet()) {
		legend.append(";");
		legend.append(kind);
	}
	for (Class<? extends SubsumptionKind> kindClass : checksSummary.keySet()) {
		legend.append(";");
		String fullName = kindClass.getName();
		legend.append(fullName.substring(fullName.lastIndexOf('$') + 1));
	}
	return legend.toString();
}
/**
 * Builds the CSV data line: "SUB", then each per-kind counter, then each per-category
 * counter — in the same (sorted map) order as {@link #csvLegend()}.
 */
public String csvContent() {
	StringBuilder data = new StringBuilder("SUB");
	for (Integer count : checks.values()) {
		data.append(";");
		data.append(count);
	}
	for (Integer count : checksSummary.values()) {
		data.append(";");
		data.append(count);
	}
	return data.toString();
}
/** @return the full two-line CSV report (legend line, newline, data line). */
public String csv() {
	return csvLegend() + "\n" + csvContent();
}
/** @return the live per-kind counter map (not a copy). */
public NavigableMap<SubsumptionKind, Integer> getChecks() {
	return this.checks;
}
/** @return the live per-category counter map (not a copy). */
public Map<Class<? extends Subsumption.SubsumptionKind>, Integer> getChecksSummary() {
	return this.checksSummary;
}
/** @return the reference model array (not a copy). */
public Constraint[] getModel() {
	return this.model;
}
/** Collection-based convenience overload of {@link #checkSubsumption(Constraint[])}. */
public Subsumption[] checkSubsumption(Collection<Constraint> allConstraints) {
	Constraint[] asArray = allConstraints.toArray(new Constraint[allConstraints.size()]);
	return this.checkSubsumption(asArray);
}
}
package minerful.postprocessing.pruning;
import java.util.Collection;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily.RelationConstraintSubFamily;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.relation.MutualRelationConstraint;
import minerful.concept.constraint.relation.NegativeRelationConstraint;
import minerful.utils.MessagePrinter;
public class SubsumptionHierarchyMarker {
// Tag used for subsumption-hierarchy-check records.
private static final String HIERARCHY_CODE = "'SH-check'";
private static MessagePrinter logger = MessagePrinter.getInstance(SubsumptionHierarchyMarker.class.getCanonicalName());
// How many constraints were marked as redundant in the last run.
private int numberOfMarkedConstraints = 0;
// True while a marking pass is in progress.
private boolean checking = false;
// The bag whose constraints get marked; must be set before marking.
private ConstraintsBag constraintsBag = null;
// Strategy deciding whether hierarchy or support wins when pruning.
private SubsumptionHierarchyMarkingPolicy policy = null;
// Optional fixpoint bag (set only by the two-bag constructor).
private ConstraintsBag fixpointConstraintsBag = null;
/** Default policy: a higher support justifies keeping the weaker constraint. */
public SubsumptionHierarchyMarker() {
	this.policy = SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY;
}
/** Uses the default policy (via this()) and binds the given bag. */
public SubsumptionHierarchyMarker(ConstraintsBag constraintsBag) {
	this();
	this.setConstraintsBag(constraintsBag);
}
/**
 * Fixpoint-bag variant. Note: deliberately does NOT chain to this(); the policy is set to
 * EAGER_ON_HIERARCHY_OVER_SUPPORT instead of the default.
 */
public SubsumptionHierarchyMarker(ConstraintsBag constraintsBag, ConstraintsBag fixpointBag) {
	this.setConstraintsBag(constraintsBag);
	this.setFixpointConstraintsBag(fixpointBag);
	this.policy = SubsumptionHierarchyMarkingPolicy.EAGER_ON_HIERARCHY_OVER_SUPPORT;
}
/** Binds the bag whose constraints will be marked. */
public void setConstraintsBag(ConstraintsBag constraintsBag) {
	this.constraintsBag = constraintsBag;
}
/** @return the currently bound constraints bag (may be {@code null}). */
public ConstraintsBag getConstraintsBag() {
	return constraintsBag;
}
/** @return how many constraints the last marking pass flagged as redundant. */
public int getNumberOfMarkedConstraints() {
	return this.numberOfMarkedConstraints;
}
/** @return whether a marking pass is currently running. */
public boolean isChecking() {
	return this.checking;
}
/** Marks subsumption redundancies over every task character indexed by the bound bag. */
public ConstraintsBag markSubsumptionRedundantConstraints() {
	return this.markSubsumptionRedundantConstraints(this.constraintsBag.getTaskChars());
}
public ConstraintsBag markSubsumptionRedundantConstraints(Collection<TaskChar> targetTaskChars) {
if (this.constraintsBag == null)
throw new IllegalStateException("Constraints bag not initialized");
this.numberOfMarkedConstraints = 0;
this.checking = true;
// exploit the ordering
MutualRelationConstraint coExiCon = null;
NegativeRelationConstraint noRelCon = null;
for (TaskChar key : targetTaskChars) {
for (Constraint currCon : constraintsBag.getConstraintsOf(key)) {
if (!currCon.isRedundant()) {
// If the policy is to be eager wrt the hierarchy subsumptions, no matter the support, this is the way to go
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_HIERARCHY_OVER_SUPPORT)) {
markGenealogyAsRedundant(currCon.getConstraintWhichThisIsBasedUpon(), currCon, key, constraintsBag);
} else {
// Otherwise, eliminate those constraints that are in the hierarchy behind the current one, if...
if (currCon.hasConstraintToBaseUpon()) {
// ... if the current one has the same support of all others
if (currCon.isMoreInformativeThanGeneric()) {
logger.trace(
"Removing the genealogy of {1}, starting with {0}, because {1} is subsumed by {0} and more informative than the whole genalogy",
currCon.getConstraintWhichThisIsBasedUpon(),
currCon
);
markGenealogyAsRedundant(currCon.getConstraintWhichThisIsBasedUpon(), currCon, key, constraintsBag);
} else {
// If we want to be "conservative" (namely, a higher support justifies the removal of more strict constraints, this is the way to go
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY)) {
logger.trace(
"Removing {0} because {1} has a higher support and {0} is subsumed by it",
currCon,
currCon.getConstraintWhichThisIsBasedUpon());
// this.markAsRedundant(currCon);
}
}
}
}
if (currCon.getSubFamily() == RelationConstraintSubFamily.COUPLING) {
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_HIERARCHY_OVER_SUPPORT)) {
this.markAsRedundant(coExiCon.getForwardConstraint());
this.markAsRedundant(coExiCon.getBackwardConstraint());
} else {
coExiCon = (MutualRelationConstraint) currCon;
if (coExiCon.hasImplyingConstraints()) {
if (coExiCon.isAsInformativeAsTheImplyingConstraints()) {
logger.trace("Removing {0}" +
", which is the forward, and {1}" +
", which is the backward, because {2}" +
" is the Mutual Relation referring to them and more informative",
coExiCon.getForwardConstraint(),
coExiCon.getBackwardConstraint(),
coExiCon);
// constraintsBag.remove(key, coExiCon.getForwardConstraint());
this.markAsRedundant(coExiCon.getForwardConstraint());
// constraintsBag.remove(key, coExiCon.getBackwardConstraint());
this.markAsRedundant(coExiCon.getBackwardConstraint());
// } else if (coExiCon.isMoreReliableThanAnyOfImplyingConstraints()){
// // Remove the weaker, if any
// if (coExiCon.isMoreReliableThanForwardConstraint()) {
// nuBag.remove(key, coExiCon.getForwardConstraint());
// } else {
// nuBag.remove(key, coExiCon.getBackwardConstraint());
// }
} else {
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY)) {
// constraintsBag.remove(key, coExiCon);
this.markAsRedundant(coExiCon);
}
}
}
}
}
if (currCon.getSubFamily() == RelationConstraintSubFamily.NEGATIVE) {
noRelCon = (NegativeRelationConstraint) currCon;
if (noRelCon.hasOpponent()) {
if (noRelCon.isMoreReliableThanTheOpponent()) {
logger.trace("Removing {0}" +
" because {1} is the opponent of {0}" +
" but less supported",
noRelCon.getOpponent(),
noRelCon);
// constraintsBag.remove(key, noRelCon.getOpponent());
this.markAsRedundant(noRelCon.getOpponent());
} else {
logger.trace("Removing {0}" +
" because {0} is the opponent of {1}" +
" but less supported",
noRelCon,
noRelCon.getOpponent());
// constraintsBag.remove(key, noRelCon);
this.markAsRedundant(noRelCon);
}
}
}
}
}
}
this.checking = false;
return constraintsBag;
}
	/**
	 * Seed-driven marking over all task characters in the bag.
	 * @return the same constraints bag, with redundant constraints marked
	 */
	public ConstraintsBag markSubsumptionRedundantConstraintsFromSeed() {
		return this.markSubsumptionRedundantConstraintsFromSeed(constraintsBag.getTaskChars());
	}
/**
* The hierarchila search for redundancies starts from a seed process Model (fixpoint model)
* <p>
* In other words, the aim is to find all the constraints derived from the seed model.
*
* @param targetTaskChars
* @return
*/
public ConstraintsBag markSubsumptionRedundantConstraintsFromSeed(Collection<TaskChar> targetTaskChars) {
if (this.constraintsBag == null)
throw new IllegalStateException("Constraints bag not initialized");
this.numberOfMarkedConstraints = 0;
this.checking = true;
// exploit the ordering
MutualRelationConstraint coExiCon = null;
NegativeRelationConstraint noRelCon = null;
for (TaskChar key : targetTaskChars) {
for (Constraint currCon : fixpointConstraintsBag.getConstraintsOf(key)) {
// for (Constraint currCon : fixpointConstraintsBag.getAllConstraints()) {
// if (!currCon.isRedundant()) {
// If the policy is to be eager wrt the hierarchy subsumptions, no matter the support, this is the way to go
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_HIERARCHY_OVER_SUPPORT)) {
if (currCon.getConstraintWhichThisIsBasedUpon() == null) {
markGenealogyAsRedundant(currCon.suggestConstraintWhichThisShouldBeBasedUpon(), currCon, null, constraintsBag);
} else {
markGenealogyAsRedundant(currCon.getConstraintWhichThisIsBasedUpon(), currCon, null, constraintsBag);
}
} else {
// Otherwise, eliminate those constraints that are in the hierarchy behind the current one, if...
if (currCon.hasConstraintToBaseUpon()) {
// ... if the current one has the same support of all others
if (currCon.isMoreInformativeThanGeneric()) {
logger.trace(
"Removing the genealogy of {1}, starting with {0}, because {1} is subsumed by {0} and more informative than the whole genalogy",
currCon.getConstraintWhichThisIsBasedUpon(),
currCon
);
markGenealogyAsRedundant(currCon.getConstraintWhichThisIsBasedUpon(), currCon, null, constraintsBag);
} else {
// If we want to be "conservative" (namely, a higher support justifies the removal of more strict constraints, this is the way to go
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY)) {
logger.trace(
"Removing {0} because {1} has a higher support and {0} is subsumed by it",
currCon,
currCon.getConstraintWhichThisIsBasedUpon());
// this.markAsRedundant(currCon);
}
}
}
}
if (currCon.getSubFamily() == RelationConstraintSubFamily.COUPLING) {
coExiCon = (MutualRelationConstraint) currCon;
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_HIERARCHY_OVER_SUPPORT)) {
try {
this.markAsRedundant(coExiCon.getForwardConstraint());
} catch (NullPointerException e) {
this.markAsRedundant(coExiCon.getPossibleForwardConstraint());
}
try {
this.markAsRedundant(coExiCon.getBackwardConstraint());
} catch (NullPointerException e) {
this.markAsRedundant(coExiCon.getPossibleBackwardConstraint());
}
} else {
if (coExiCon.hasImplyingConstraints()) {
if (coExiCon.isAsInformativeAsTheImplyingConstraints()) {
logger.trace("Removing {0}" +
", which is the forward, and {1}" +
", which is the backward, because {2}" +
" is the Mutual Relation referring to them and more informative",
coExiCon.getForwardConstraint(),
coExiCon.getBackwardConstraint(),
coExiCon);
// constraintsBag.remove(key, coExiCon.getForwardConstraint());
this.markAsRedundant(coExiCon.getForwardConstraint());
// constraintsBag.remove(key, coExiCon.getBackwardConstraint());
this.markAsRedundant(coExiCon.getBackwardConstraint());
// } else if (coExiCon.isMoreReliableThanAnyOfImplyingConstraints()){
// // Remove the weaker, if any
// if (coExiCon.isMoreReliableThanForwardConstraint()) {
// nuBag.remove(key, coExiCon.getForwardConstraint());
// } else {
// nuBag.remove(key, coExiCon.getBackwardConstraint());
// }
} else {
if (this.policy.equals(SubsumptionHierarchyMarkingPolicy.EAGER_ON_SUPPORT_OVER_HIERARCHY)) {
// constraintsBag.remove(key, coExiCon);
this.markAsRedundant(coExiCon);
}
}
}
}
}
if (currCon.getSubFamily() == RelationConstraintSubFamily.NEGATIVE) {
noRelCon = (NegativeRelationConstraint) currCon;
if (noRelCon.hasOpponent()) {
if (noRelCon.isMoreReliableThanTheOpponent()) {
logger.trace("Removing {0}" +
" because {1} is the opponent of {0}" +
" but less supported",
noRelCon.getOpponent(),
noRelCon);
// constraintsBag.remove(key, noRelCon.getOpponent());
this.markAsRedundant(noRelCon.getOpponent());
} else {
logger.trace("Removing {0}" +
" because {0} is the opponent of {1}" +
" but less supported",
noRelCon,
noRelCon.getOpponent());
// constraintsBag.remove(key, noRelCon);
this.markAsRedundant(noRelCon);
}
}
}
}
}
// }
this.checking = false;
return constraintsBag;
}
	/**
	 * Walks the chain of base ("parent") constraints starting at {@code lastSon}
	 * and marks each one as redundant, replacing it in the bag.
	 *
	 * @param lastSon first constraint of the genealogy to mark
	 * @param lastSurvivor the (stricter) constraint that stays; currently unused here
	 * @param key task character; NOTE: reassigned inside the loop from each
	 *            constraint's own base task character, so the passed value
	 *            (possibly null) is only a placeholder
	 * @param genealogyTree the bag in which marked constraints are replaced
	 * @return the same bag passed in as {@code genealogyTree}
	 */
	private ConstraintsBag markGenealogyAsRedundant(
			Constraint lastSon,
			Constraint lastSurvivor,
			TaskChar key,
			ConstraintsBag genealogyTree) {
		// TODO check if the EDITS produce any conflicts with the normal simplifier
		Constraint genealogyDestroyer = lastSon;
//		ConstraintImplicationVerse destructionGeneratorsFamily = lastSurvivor.getSubFamily();
		while (genealogyDestroyer != null) {
			key = genealogyDestroyer.getBase().getFirstTaskChar();
			this.markAsRedundant(genealogyDestroyer);
			genealogyTree.replace(key, genealogyDestroyer);
			// Follow the explicit base link when present, else the suggested one
			if (genealogyDestroyer.getConstraintWhichThisIsBasedUpon() == null) {
				genealogyDestroyer = genealogyDestroyer.suggestConstraintWhichThisShouldBeBasedUpon();
			} else {
				genealogyDestroyer = genealogyDestroyer.getConstraintWhichThisIsBasedUpon();
			}
		}
		return genealogyTree;
	}
private void markAsRedundant(Constraint constraint) {
if (!constraint.isRedundant()) {
constraint.setRedundant(true);
this.numberOfMarkedConstraints++;
}
}
	/** @return the marking policy in use */
	public SubsumptionHierarchyMarkingPolicy getPolicy() {
		return policy;
	}
	/** @param policy the marking policy to use for subsequent passes */
	public void setPolicy(SubsumptionHierarchyMarkingPolicy policy) {
		this.policy = policy;
	}
	/**
	 * Logs a CSV-style summary of the last marking pass: operation code, number
	 * of input constraints, number of marked constraints, and elapsed time.
	 *
	 * @param before timestamp taken before the marking pass
	 * @param after timestamp taken after the marking pass
	 * @throws IllegalStateException if a marking pass is still in progress
	 */
	public void printComputationStats(long before, long after) {
		if (this.isChecking()) {
			throw new IllegalStateException("Subsumption-hierarchy-based check in progress");
		}
		// One buffer for the values row, one for the legend row, one for the final report
		StringBuffer
			csvSummaryBuffer = new StringBuffer(),
			csvSummaryLegendBuffer = new StringBuffer(),
			csvSummaryComprehensiveBuffer = new StringBuffer();
		csvSummaryBuffer.append(SubsumptionHierarchyMarker.HIERARCHY_CODE);
		csvSummaryLegendBuffer.append("'Operation code'");
		csvSummaryBuffer.append(";");
		csvSummaryLegendBuffer.append(";");
		// --------------------------------
		csvSummaryBuffer.append(this.constraintsBag.howManyConstraints());
		csvSummaryLegendBuffer.append("'Input constraints'");
		csvSummaryBuffer.append(";");
		csvSummaryLegendBuffer.append(";");
		// --------------------------------
		csvSummaryBuffer.append(this.getNumberOfMarkedConstraints());
		csvSummaryLegendBuffer.append("'Marked constraints'");
		csvSummaryBuffer.append(";");
		csvSummaryLegendBuffer.append(";");
		// --------------------------------
		csvSummaryBuffer.append(after - before);
		csvSummaryLegendBuffer.append("'Time'");
//		csvSummaryBuffer.append(";");
//		csvSummaryLegendBuffer.append(";");
		csvSummaryComprehensiveBuffer.append("\n\nSubsumption-hierarchy-based pruning: \n");
		csvSummaryComprehensiveBuffer.append(csvSummaryLegendBuffer.toString());
		csvSummaryComprehensiveBuffer.append("\n");
		csvSummaryComprehensiveBuffer.append(csvSummaryBuffer.toString());
		logger.info(csvSummaryComprehensiveBuffer.toString());
	}
	/** @return the seed ("fixpoint") bag, or null if none was set */
	public ConstraintsBag getFixpointConstraintsBag() {
		return fixpointConstraintsBag;
	}
	/** @param fixpointConstraintsBag seed bag for seed-driven marking */
	public void setFixpointConstraintsBag(ConstraintsBag fixpointConstraintsBag) {
		this.fixpointConstraintsBag = fixpointConstraintsBag;
	}
} | 19,803 | 51.115789 | 168 | java |
Janus | Janus-master/src/minerful/postprocessing/pruning/SubsumptionHierarchyMarkingPolicy.java | package minerful.postprocessing.pruning;
public enum SubsumptionHierarchyMarkingPolicy {
	/*
	 * Privileges the hierarchy (eager policy): for example, if the model
	 * contains both AlternatePrecedence(A, B) and Precedence(A, B), the latter
	 * (the laxer one) is pruned out, regardless of support values.
	 */
	EAGER_ON_HIERARCHY_OVER_SUPPORT,
	/*
	 * Privileges the support (eager policy): for example, if the model contains
	 * AlternatePrecedence(A, B) with a support of 0.89 and Precedence(A, B)
	 * with a support of 0.9, then AlternatePrecedence(A, B) (the stricter but
	 * less supported one) is pruned out.
	 */
	EAGER_ON_SUPPORT_OVER_HIERARCHY,
	/*
	 * Prunes only subsumed constraints, and only if ALL the subsuming ones in
	 * the whole hierarchy have the same support.
	 */
	CONSERVATIVE
} | 768 | 33.954545 | 118 | java |
Janus | Janus-master/src/minerful/postprocessing/pruning/ThresholdsMarker.java | package minerful.postprocessing.pruning;
import java.util.HashSet;
import java.util.Set;

import javax.xml.bind.annotation.XmlTransient;

import org.apache.log4j.Logger;

import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
public class ThresholdsMarker {
public static final String THRESHOLDS_CHECK_CODE = "'Th-check'";
@XmlTransient
private static Logger logger = Logger.getLogger(ThresholdsMarker.class.getCanonicalName());
private int numberOfMarkedConstraints = 0;
private boolean checking = false;
public int getNumberOfMarkedConstraints() {
return numberOfMarkedConstraints;
}
public boolean isChecking() {
return checking;
}
private ConstraintsBag constraintsBag = null;
private ConstraintsBag fixpointConstraintsBag = null;
public ThresholdsMarker(ConstraintsBag constraintsBag) {
this.constraintsBag = constraintsBag;
}
public ThresholdsMarker(ConstraintsBag constraintsBag, ConstraintsBag fixpointConstraintsBag) {
this(constraintsBag);
this.fixpointConstraintsBag = fixpointConstraintsBag;
}
public ConstraintsBag markConstraintsBelowSupportThreshold(double supportThreshold) {
return markConstraintsBelowThresholds(supportThreshold, Constraint.DEFAULT_CONFIDENCE, Constraint.DEFAULT_INTEREST_FACTOR);
}
public ConstraintsBag markConstraintsBelowThresholds(double supportThreshold, double confidence, double interest) {
for (TaskChar key : constraintsBag.getTaskChars()) {
for (Constraint con : constraintsBag.getConstraintsOf(key)) {
if( fixpointConstraintsBag!=null && fixpointConstraintsBag.getAllConstraints().contains(con)){
// TODO probably inefficient. make a function constraintBag.contains(Constraint c) which does not force to retrieve them all each time
continue;
}
con.setBelowSupportThreshold(!con.hasSufficientSupport(supportThreshold));
con.setBelowConfidenceThreshold(!con.hasSufficientConfidence(confidence));
con.setBelowInterestFactorThreshold(!con.hasSufficientInterestFactor(interest));
if (con.isBelowSupportThreshold() || con.isBelowConfidenceThreshold() || con.isBelowInterestFactorThreshold()) {
this.numberOfMarkedConstraints++;
}
}
}
return constraintsBag;
}
public void printComputationStats(long before, long after) {
if (this.isChecking()) {
throw new IllegalStateException("Subsumption-hierarchy-based check in progress");
}
StringBuffer
csvSummaryBuffer = new StringBuffer(),
csvSummaryLegendBuffer = new StringBuffer(),
csvSummaryComprehensiveBuffer = new StringBuffer();
csvSummaryBuffer.append(ThresholdsMarker.THRESHOLDS_CHECK_CODE);
csvSummaryLegendBuffer.append("'Operation code'");
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append(";");
// --------------------------------
csvSummaryBuffer.append(this.constraintsBag.howManyConstraints());
csvSummaryLegendBuffer.append("'Input constraints'");
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append(";");
// --------------------------------
csvSummaryBuffer.append(this.getNumberOfMarkedConstraints());
csvSummaryLegendBuffer.append("'Marked constraints'");
csvSummaryBuffer.append(";");
csvSummaryLegendBuffer.append(";");
// --------------------------------
csvSummaryBuffer.append(after - before);
csvSummaryLegendBuffer.append("'Time'");
// csvSummaryBuffer.append(";");
// csvSummaryLegendBuffer.append(";");
csvSummaryComprehensiveBuffer.append("\n\nThresholds-based pruning: \n");
csvSummaryComprehensiveBuffer.append(csvSummaryLegendBuffer.toString());
csvSummaryComprehensiveBuffer.append("\n");
csvSummaryComprehensiveBuffer.append(csvSummaryBuffer.toString());
logger.info(csvSummaryComprehensiveBuffer.toString());
}
} | 4,219 | 41.2 | 153 | java |
Janus | Janus-master/src/minerful/reactive/automaton/AToken.java | package minerful.reactive.automaton;
import dk.brics.automaton.State;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
/**
* Class to manage a set of token of different future automata related by the same activation.
*/
public class AToken {
private Set<State> tokensCollection;
/**
* Initialize an empty AToken
*/
public AToken() {
this.tokensCollection = new TreeSet<State>();
}
/**
* Initialize an AToken with a collection of state pointers
*
* @param tokensCollection
*/
public AToken(Set<State> tokensCollection) {
this.tokensCollection = tokensCollection;
}
/**
* @return the token collection
*/
public Set<State> getTokensCollection() {
return tokensCollection;
}
/**
* @param token new token
*/
public void addTokenToCollection(State token) {
this.tokensCollection.add(token);
}
/**
* @param token to be removed
*/
public void removeTokenFromCollection(State token) {
this.tokensCollection.remove(token);
}
}
| 1,117 | 20.09434 | 94 | java |
Janus | Janus-master/src/minerful/reactive/automaton/ATokenRunner.java | package minerful.reactive.automaton;
import dk.brics.automaton.State;
/**
* Class to run traces over a set of tokens (AToken!) related to the same activation
*/
/**
 * Replays trace transitions over the set of tokens (an {@link AToken}) spawned
 * by a single activation.
 */
public class ATokenRunner {
	private AToken aToken;

	/**
	 * Builds a runner for a specific token set.
	 *
	 * @param aToken object containing the tokens related to an activation
	 */
	public ATokenRunner(AToken aToken) {
		this.aToken = aToken;
	}

	/** @return the AToken this runner advances */
	public AToken getaToken() {
		return this.aToken;
	}

	/**
	 * Advances every token by one step along the given transition, replacing
	 * the wrapped AToken with the advanced one.
	 *
	 * @param transition transition character to follow
	 */
	public void step(char transition) {
		AToken advanced = new AToken();
		for (State current : this.aToken.getTokensCollection()) {
			advanced.addTokenToCollection(current.step(transition));
		}
		this.aToken = advanced;
	}

	/**
	 * Evaluates the token set in its current configuration.
	 *
	 * @return true if at least one token sits on an accepting state
	 */
	public boolean getCurrentResult() {
		for (State current : this.aToken.getTokensCollection()) {
			if (current.isAccept()) {
				return true;
			}
		}
		return false;
	}
}
| 1,304 | 23.166667 | 84 | java |
Janus | Janus-master/src/minerful/reactive/automaton/ConjunctAutomata.java | package minerful.reactive.automaton;
import dk.brics.automaton.Automaton;
/**
* Parametric conjunct Automata
*/
/**
 * A conjunction of (parametric) past, present, and future automata. Any of the
 * three members may be absent, in which case it is held as null.
 */
public class ConjunctAutomata {
	private Automaton pastAutomaton;
	private Automaton presentAutomaton;
	private Automaton futureAutomaton;

	/**
	 * Builds the conjunction from its three members; a separation-theorem
	 * result may be a disjunction of such conjunctions. Pass null for any
	 * automaton that is not present.
	 *
	 * @param pastAutomaton automaton over the past, or null
	 * @param presentAutomaton automaton over the present, or null
	 * @param futureAutomaton automaton over the future, or null
	 */
	public ConjunctAutomata(Automaton pastAutomaton, Automaton presentAutomaton, Automaton futureAutomaton) {
		this.pastAutomaton = pastAutomaton;
		this.presentAutomaton = presentAutomaton;
		this.futureAutomaton = futureAutomaton;
		// TODO optimization of automata (e.g. minimization, completion, ...)
	}

	/** @return the past automaton, possibly null */
	public Automaton getPastAutomaton() {
		return this.pastAutomaton;
	}

	/** @return the present automaton, possibly null */
	public Automaton getPresentAutomaton() {
		return this.presentAutomaton;
	}

	/** @return the future automaton, possibly null */
	public Automaton getFutureAutomaton() {
		return this.futureAutomaton;
	}

	/** @return true if a past automaton is present, false otherwise */
	public boolean hasPast() {
		return null != this.pastAutomaton;
	}

	/** @return true if a present automaton is present, false otherwise */
	public boolean hasPresent() {
		return null != this.presentAutomaton;
	}

	/** @return true if a future automaton is present, false otherwise */
	public boolean hasFuture() {
		return null != this.futureAutomaton;
	}
}
| 1,500 | 24.440678 | 109 | java |
Janus | Janus-master/src/minerful/reactive/automaton/ConjunctAutomataOfflineRunner.java | package minerful.reactive.automaton;
import dk.brics.automaton.*;
import java.util.*;
/**
* Object to run a trace over conjunct automata
*/
/**
 * Object to run a trace over conjunct automata in an offline setting: past and
 * present are replayed forward, while the future automaton is replayed backward
 * over a reversed copy built at construction time.
 */
public class ConjunctAutomataOfflineRunner {
    private ConjunctAutomata automata;
    private Collection<Character> alphabet;
    // Current state pointers; null when the corresponding automaton is absent
    private State currentPastState = null;
    private State currentPresentState = null;
    private State currentFutureState = null;
    // Initial states, kept so reset() can rewind without rebuilding anything
    private State initialPastState = null;
    private State initialPresentState = null;
    private State initialFutureState = null;
    /**
     * Initialize a runner for a given conjunct automata
     * @param automata Conjunct Automata to be run
     * @param alphabet parametric alphabet used to reverse the future automaton
     */
    public ConjunctAutomataOfflineRunner(ConjunctAutomata automata, Collection<Character> alphabet) {
        this.automata = automata;
        this.alphabet =alphabet;
        if (automata.hasPast()) {
            this.initialPastState = automata.getPastAutomaton().getInitialState();
            this.currentPastState = this.initialPastState;
        }
        if (automata.hasPresent()) {
            this.initialPresentState = automata.getPresentAutomaton().getInitialState();
            this.currentPresentState = this.initialPresentState;
        }
        if (automata.hasFuture()) {
            // Reversed future for offline settings
            // NOTE(review): raw (Set) cast of a Collection — fails at runtime if
            // the caller passes a non-Set collection; confirm callers
            Automaton newFut = Utils.getReversedAutomaton(automata.getFutureAutomaton(), (Set) alphabet);
            this.initialFutureState = newFut.getInitialState();
            this.currentFutureState = this.initialFutureState;
        }
    }
    /**
     * replay a trace on the automata and return a vector with the acceptance of each state
     *
     * @param trace trace as char[] to be evaluate by the conjunct automata.
     * @param traceLength number of meaningful characters in {@code trace}
     * @param parametricMapping maps trace characters to the parametric alphabet;
     *        unmapped characters fall back to 'z' (presumably a letter outside
     *        the parametric alphabet — TODO confirm)
     * @return per-event conjunction of past/present/future acceptance
     */
    public boolean[] evaluateTrace(char[] trace, int traceLength, Map<Character, Character> parametricMapping) {
        boolean[] result = new boolean[traceLength];
        Arrays.fill(result, Boolean.TRUE);
        for (int i = 0; i < traceLength; i++) {
            char transition_onward = parametricMapping.getOrDefault(trace[i], 'z');
            // PAST
            if (currentPastState != null) {
                currentPastState = currentPastState.step(transition_onward);
                result[i] &= currentPastState.isAccept();
            }
            // PRESENT
            if (currentPresentState != null) {
                currentPresentState = currentPresentState.step(transition_onward);
                result[i] &= currentPresentState.isAccept();
            }
            // FUTURE (backward): consumes the trace from its end, writing results
            // at the mirrored position
            if (currentFutureState != null) {
                currentFutureState = currentFutureState.step(parametricMapping.getOrDefault(trace[traceLength - 1 - i], 'z'));
                result[traceLength - 1 - i] &= currentFutureState.isAccept();
            }
        }
        return result;
    }
    /**
     * Reset the automata state to make it ready for a new trace
     */
    public void reset() {
        this.currentPastState = this.initialPastState;
        this.currentPresentState = this.initialPresentState;
        this.currentFutureState = this.initialFutureState;
    }
}
| 3,218 | 33.244681 | 126 | java |
Janus | Janus-master/src/minerful/reactive/automaton/ConjunctAutomataRunner.java | package minerful.reactive.automaton;
import dk.brics.automaton.State;
/**
* Object to run a trace over conjunct automata
*/
/**
 * Object to run a trace over conjunct automata in an online setting: the past
 * automaton is stepped event by event, the present automaton is evaluated from
 * its initial state at activation time, and future tokens are handed out to
 * {@link ATokenRunner} objects.
 */
public class ConjunctAutomataRunner {
    private ConjunctAutomata automata;
    private State currentPastState = null;
    private State currentPresentState = null;
    // This collection it cannot be a set as different token in same position must be considered separately
    // private List<State> currentFutureTokens = null; //ATokens!!!

    /**
     * Initialize a runner for a given conjunct automata
     *
     * @param automata Conjunct Automata to be run
     */
    public ConjunctAutomataRunner(ConjunctAutomata automata) {
        this.automata = automata;
        if (automata.hasPast()) this.currentPastState = automata.getPastAutomaton().getInitialState();
        if (automata.hasPresent()) this.currentPresentState = automata.getPresentAutomaton().getInitialState();
    }

    /**
     * Perform a single step in the automata using the given transition.
     * Only the past automaton is advanced here: the present automaton is
     * evaluated on demand in getCurrentResult, and future steps are carried
     * out by AToken objects.
     */
    public void step(char transition) {
        // PAST step
        if (currentPastState != null) {
            currentPastState = currentPastState.step(transition);
        }
    }

    /**
     * @return current state pointer of the past automaton (null if absent)
     */
    public State getCurrentPastState() {
        return currentPastState;
    }

    /**
     * Hand out the initial state of the future automaton as a fresh token.
     *
     * @return the future automaton's initial state, or null if there is no
     *         future automaton
     */
    public State getAToken() {
        if (automata.hasFuture()) {
            // TODO BEWARE! side effect on State object? AKA: are we giving to each one the same object reference?
            return automata.getFutureAutomaton().getInitialState();
        } else return null;
    }

    /**
     * Reset the automata state to make it ready for a new trace
     */
    public void reset() {
        if (automata.hasPast()) this.currentPastState = automata.getPastAutomaton().getInitialState();
        if (automata.hasPresent()) this.currentPresentState = automata.getPresentAutomaton().getInitialState();
    }

    /**
     * If no future automaton is present it is possible to retrieve the result
     * of this conjunct automata immediately.
     *
     * @return true if in the current state it is possible to have a certain result
     */
    public boolean hasClearResult() {
        return !automata.hasFuture();
    }

    /**
     * BEWARE before calling this method be sure to check hasClearResult() returns true
     *
     * @param transition transition on which the present automaton is evaluated
     * @return true if the current state is accepting, false otherwise
     */
    public boolean getCurrentResult(char transition) {
        // Fix: use the primitive boolean; the original used boxed Boolean,
        // autoboxing on every conjunction for no benefit.
        boolean res = true;
        if (automata.hasPast()) {
            res = res && currentPastState.isAccept();
        }
        if (automata.hasPresent()) {
            // Present is evaluated from its stored state without advancing it
            res = res && currentPresentState.step(transition).isAccept();
        }
        return res;
    }
}
| 3,218 | 31.846939 | 115 | java |
Janus | Janus-master/src/minerful/reactive/automaton/SeparatedAutomaton.java | package minerful.reactive.automaton;
import dk.brics.automaton.Automaton;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Parametric Separated Automaton (i.e. disjunction of conjunction of past/present/future automata)
*/
/**
 * Parametric separated automaton: a disjunction of conjunctions of
 * past/present/future automata, guarded by an activator automaton.
 */
public class SeparatedAutomaton {
    // Ordered parametric letters; position i maps to the i-th concrete letter
    private char[] parametricAlphabet;
    private Set<ConjunctAutomata> disjunctAutomata; //memo. Separation theorem result is a disjunction of separated automata
    private Automaton activator;
    // Optional human-readable name, used by toString when set
    private String nominalID;
    /* TODO for Version > 0.1 @Alessio
     * The activator is represented as automaton for future extensions.
     * In this version should be way better to directly check if
     * the current trace character is equal to the activation one.
     * */

    /**
     * Initialize an empty separated automaton
     */
    public SeparatedAutomaton() {
        this.disjunctAutomata = new HashSet<ConjunctAutomata>();
    }

    /**
     * Initialize a separated automaton with an activator automaton
     *
     * @param activator automaton
     */
    public SeparatedAutomaton(Automaton activator) {
        this.activator = activator;
        this.disjunctAutomata = new HashSet<ConjunctAutomata>();
    }

    /**
     * Initialize a separated automaton with a list of conjunct automata and an activator
     *
     * @param disjunctionOf collection of conjunct automata, in disjunction within this separated automaton
     * @param activator automaton
     */
    public SeparatedAutomaton(Automaton activator, List<ConjunctAutomata> disjunctionOf) {
        this.activator = activator;
        this.disjunctAutomata = new HashSet<ConjunctAutomata>();
        this.disjunctAutomata.addAll(disjunctionOf);
    }

    /**
     * Initialize a separated automaton with a list of conjunct automata, an activator, and the parametric alphabet
     *
     * @param disjunctionOf collection of conjunct automata, in disjunction within this separated automaton
     * @param activator automaton
     * @param parametricAlphabet ordered parametric letters used by the automata
     */
    public SeparatedAutomaton(Automaton activator, List<ConjunctAutomata> disjunctionOf, char[] parametricAlphabet) {
        this.activator = activator;
        this.disjunctAutomata = new HashSet<ConjunctAutomata>();
        this.disjunctAutomata.addAll(disjunctionOf);
        this.parametricAlphabet = parametricAlphabet;
    }

    /**
     * @return Set of disjunct automata
     */
    public Set<ConjunctAutomata> getDisjunctAutomata() {
        return disjunctAutomata;
    }

    /**
     * Add a new conjunct automata to the disjunction set
     *
     * @param newConjunction triple of conjunct automata to be added
     */
    public void addDisjunctionAutomata(ConjunctAutomata newConjunction) {
        this.disjunctAutomata.add(newConjunction);
    }

    /**
     * @return activator automaton
     */
    public Automaton getActivator() {
        return activator;
    }

    /**
     * @param activator new activator automaton
     */
    public void setActivator(Automaton activator) {
        this.activator = activator;
    }

    /**
     * @return ordered list of the characters of the alphabet used by the parametric automaton
     */
    public char[] getParametricAlphabet() {
        return parametricAlphabet;
    }

    /**
     * @param parametricAlphabet ordered list of character to be used by the parametric automaton
     */
    public void setParametricAlphabet(char[] parametricAlphabet) {
        this.parametricAlphabet = parametricAlphabet;
    }

    /**
     * @return the nominal name of the automaton if set, the default Object
     *         representation otherwise
     */
    @Override
    public String toString() {
        if (nominalID != null) {
            return nominalID;
        } else {
            return super.toString();
        }
    }

    /**
     * @return nominal ID of the automaton
     */
    public String getNominalID() {
        return this.nominalID;
    }

    /**
     * "human" Name of the automaton for toString function
     * @param nominalID new nominal ID of the automaton
     */
    public void setNominalID(String nominalID) {
        this.nominalID = nominalID;
    }
}
| 4,113 | 28.597122 | 124 | java |
Janus | Janus-master/src/minerful/reactive/automaton/SeparatedAutomatonOfflineRunner.java | package minerful.reactive.automaton;
import dk.brics.automaton.State;
import java.util.*;
/**
* Object to run a trace over a separated automata in offline setting.
 * This is done in O(2n) time through the double-reversal technique, i.e., one pass going forward for the past and one pass going backward for the future.
* It is assumed that the automata are already properly reversed.
*/
public class SeparatedAutomatonOfflineRunner {
    private SeparatedAutomaton automaton;
    // REMEMBER that separated automaton is a disjunction of conjunction!!!
    private List<ConjunctAutomataOfflineRunner> disjunctAutomataOfflineRunners; // it takes care of past and present
    // Concrete trace letters, in the order they bind to the parametric alphabet
    private List<Character> specificAlphabet;
    // Maps each concrete letter to its parametric counterpart
    private Map<Character, Character> parametricMapping;
    /**
     * Initialize a runner object to run trace on a given separated automaton.
     * For each disjunct automata of the separated automaton a specific runner
     * is initialized.
     *
     * @param automaton on which running the analysis
     * @param specificAlphabet ordered array of character from the trace to be used in the parametric automaton
     */
    public SeparatedAutomatonOfflineRunner(SeparatedAutomaton automaton, List<Character> specificAlphabet) {
        this.automaton = automaton;
        this.disjunctAutomataOfflineRunners = new ArrayList<ConjunctAutomataOfflineRunner>();
        this.parametricMapping = new HashMap<Character, Character>();
        this.specificAlphabet = specificAlphabet;
        char[] par = automaton.getParametricAlphabet();
        // Bind the i-th concrete letter to the i-th parametric letter
        for (int i = 0; i < specificAlphabet.size(); i++) {
            parametricMapping.put(specificAlphabet.get(i), par[i]);
        }
        // (parametric)Alphabet required in order to reverse the future automata
        LinkedHashSet<Character> alphabet = new LinkedHashSet<>(parametricMapping.values());
        for (ConjunctAutomata ca : automaton.getDisjunctAutomata()) {
            this.disjunctAutomataOfflineRunners.add(new ConjunctAutomataOfflineRunner(ca, alphabet));
        }
    }
    /**
     * run the separatedAutomaton on the given trace, accumulating into result
     * a 2-bit encoding per event: [activator-bit][target-bit]
     *
     * @param trace trace characters to replay
     * @param traceLength number of meaningful characters in {@code trace}
     * @param result per-event output array, OR-accumulated across runners
     */
    public void runTrace(char[] trace, int traceLength, byte[] result) {
        // Target: OR of the disjunct (past/present/future) evaluations
        for (ConjunctAutomataOfflineRunner car : disjunctAutomataOfflineRunners) {
            int i = 0;
            for (boolean eval : car.evaluateTrace(trace, traceLength, parametricMapping)) {
                result[i] |= (eval) ? 1 : 0;
                i++;
            }
        }
        // Activation: characters without a mapping fall back to 'z'
        // (presumably a letter outside the parametric alphabet — TODO confirm)
        State activatorPointer = automaton.getActivator().getInitialState();
        for (int i = 0; i < traceLength; i++) {
            char transition_onward = parametricMapping.getOrDefault(trace[i], 'z');
            activatorPointer = activatorPointer.step(transition_onward);
            result[i] += (activatorPointer.isAccept()) ? 2 : 0; // we are adding the second bit on the left, i.e., [activator-bit][target-bit]
        }
    }
    /**
     * Reset the automaton state to make it ready for a new trace
     */
    public void reset() {
        for (ConjunctAutomataOfflineRunner car : disjunctAutomataOfflineRunners) {
            car.reset();
        }
    }
    /**
     * @return nominal name of the automaton concatenated with the specific letter used
     */
    @Override
    public String toString() {
        StringBuffer a = new StringBuffer("(");
        for (char c : specificAlphabet) {
            a.append(c + ",");
        }
        return automaton.toString() + a.substring(0, a.length() - 1) + ")";
    }
    /**
     * @return nominal name of the automaton concatenated with the specific letters substituted with the real events
     * @param map decoding map from specific letters to real event names
     */
    public String toStringDecoded(Map map) {
        StringBuffer a = new StringBuffer("(");
        for (char c : specificAlphabet) {
            a.append(map.get(c) + ",");
        }
        return automaton.toString() + a.substring(0, a.length() - 1) + ")";
    }
    /**
     * Get the Separated automaton object of the runner
     *
     * @return the wrapped separated automaton
     */
    public SeparatedAutomaton getAutomaton() {
        return automaton;
    }
}
| 4,147 | 36.035714 | 142 | java |
Janus | Janus-master/src/minerful/reactive/automaton/SeparatedAutomatonRunner.java | package minerful.reactive.automaton;
import dk.brics.automaton.State;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Stateful runner that replays a single trace, event by event, over a
 * {@link SeparatedAutomaton} (a disjunction of conjunct automata), keeping
 * the counters needed to compute the degree of truth of the constraint.
 *
 * NOTE(review): one runner instance handles one trace at a time; call
 * {@link #reset()} before feeding a new trace.
 */
public class SeparatedAutomatonRunner {
    // The parametric separated automaton under evaluation.
    private SeparatedAutomaton automaton;
    // Current state of the activator automaton along the replayed trace.
    private State activatorPointer;
    // REMEMBER that a separated automaton is a disjunction of conjunctions.
    private List<ConjunctAutomataRunner> disjunctAutomataRunners; // one runner per disjunct
    // Pending activations whose outcome depends on the (still unseen) future of the trace.
    private List<ATokenRunner> aTokensRunners;
    // How many times the constraint was activated in the current trace.
    private int activationCounter;
    // How many activations were already confirmed as fulfilled.
    private int fulfilledActivationCounter;
    // Trace letters bound, in order, to the parametric alphabet of the automaton.
    private List<Character> specificAlphabet;
    // Translation table: real trace letter -> parametric automaton letter.
    private Map<Character, Character> parametricMapping;
    /**
     * Initialize a runner object to run trace on a given separated automaton.
     * For each disjunct automata of the spared automaton is initialized a specific runner
     *
     * @param automaton on which running the analysis
     * @param specificAlphabet ordered array of character from the trace to be used in the parametric automaton
     */
    public SeparatedAutomatonRunner(SeparatedAutomaton automaton, List<Character> specificAlphabet) {
        this.automaton = automaton;
        this.disjunctAutomataRunners = new ArrayList<ConjunctAutomataRunner>();
        this.aTokensRunners = new ArrayList<ATokenRunner>();
        this.parametricMapping = new HashMap<Character, Character>();
        this.specificAlphabet = specificAlphabet;
        char[] par = automaton.getParametricAlphabet();
        // Bind the i-th specific letter to the i-th parametric letter.
        for (int i = 0; i < specificAlphabet.size(); i++) {
            parametricMapping.put(specificAlphabet.get(i), par[i]);
        }
        // it is better to put the present automaton as first of the list for performance speedup
        // BUT pasts must be carried on any way
        for (ConjunctAutomata ca : automaton.getDisjunctAutomata()) {
            this.disjunctAutomataRunners.add(new ConjunctAutomataRunner(ca));
        }
        this.activationCounter = 0;
        this.fulfilledActivationCounter = 0;
        this.activatorPointer = automaton.getActivator().getInitialState();
    }
    /**
     * @return Number of total activations of the constraint so far
     */
    public int getActivationCounter() {
        return activationCounter;
    }
    /**
     * @return Number of activations already confirmed as fulfilled
     */
    public int getFulfilledActivationCounter() {
        return fulfilledActivationCounter;
    }
    /**
     * The current degree of truth of the constraint represented by the separated automaton and the trace run on it.
     * It is the ratio between the fulfilled activations and the total number of activations;
     * pending activations whose AToken is currently in a positive state count as fulfilled.
     *
     * @return support of the constraint represented by the separated automaton wrt the given trace
     *         (0.0 if the constraint was never activated).
     */
    public double getDegreeOfTruth() {
        // TODO full support formula
        if (activationCounter == 0) {
            return 0.0;
        }
        int aTokenFullfilled = 0;
        for (ATokenRunner atr : aTokensRunners) {
            if (atr.getCurrentResult()) aTokenFullfilled++;
        }
        return (double) (fulfilledActivationCounter + aTokenFullfilled) / activationCounter;
    }
    /**
     * @return True if the constraint has been activated at least once in the trace
     */
    public boolean isActivated() {
        return (activationCounter != 0);
    }
    /**
     * Performs a single step in the separated automaton using the given transition.
     * The real event is first translated into the parametric alphabet; if the step
     * activates the constraint, each disjunct is checked for an immediate verdict,
     * and undecided ones are tracked by a freshly launched AToken.
     */
    public void step(char realTransition) {
        // MEMO. we are using a parametric automaton:
        // the transition from the real trace must be translated into the generic one
        // getOrDefault java8 required
        // TODO parametrize the default character instead of hardcoding
        char transition = parametricMapping.getOrDefault(realTransition, 'z');
        this.activatorPointer = this.activatorPointer.step(transition);
        // Activation step
        if (this.activated(transition)) {
            activationCounter++;
            AToken standReadyAToken = new AToken();
            ArrayList<State> standReadyATokensTemp = new ArrayList<>();
            boolean solved = false;
            boolean unclear = false;
            for (ConjunctAutomataRunner car : disjunctAutomataRunners) {
                // step in the past (anyway)
                car.step(transition);
                // if we can retrieve a clear positive result no need for AToken to be launched
                if (!solved) {
                    /* TODO for Version > 0.1 @Alessio
                     * Not suitable for parallel computation, force to a semaphore check at each step
                     * */
                    if (car.hasClearResult()) {
                        if (car.getCurrentResult(transition)) {
                            solved = true;
                            fulfilledActivationCounter++;
                        }
                    } else {
                        unclear = true; //if at least one solution need to be checked in the future (if no positive certain answer)
                        // check the future of the conjunction only if past and present are ok (or absent)
                        if(car.getCurrentResult(transition)){
                            standReadyATokensTemp.add(car.getAToken());
                        }
                    }
                }
            }
            // If no positive certain result and at least one result need to be checked in the future, launch AToken!
            if (!solved && unclear) {
                for (State s : standReadyATokensTemp) {
                    standReadyAToken.addTokenToCollection(s);
                }
                aTokensRunners.add(new ATokenRunner(standReadyAToken));
            }
            // ATokens Step in the future
            for (ATokenRunner a : aTokensRunners) {
                a.step(transition);
            }
        } else {
            // step in the past
            for (ConjunctAutomataRunner car : disjunctAutomataRunners) {
                car.step(transition);
            }
            // ATokens Step in the future
            for (ATokenRunner a : aTokensRunners) {
                a.step(transition);
                /* TODO for Version > 0.1 @Alessio
                 * Check if a token ends up in a permanent violation/satisfaction state.
                 * - satisfaction: stop all tokens and return positive result
                 * - violation: remove token. if no token remaining return negative result
                 * */
            }
        }
    }
    // True when the activator automaton is currently in an accepting state.
    // NOTE(review): the parameter is unused — activatorPointer was already advanced
    // by step() before this is called; the parameter is kept for interface clarity.
    private boolean activated(char transition) {
        return activatorPointer.isAccept();
    }
    /**
     * Resets all counters, pending ATokens and automata states to make the
     * runner ready for a new trace.
     */
    public void reset() {
        activationCounter = 0;
        fulfilledActivationCounter = 0;
        aTokensRunners = new ArrayList<ATokenRunner>();
        for (ConjunctAutomataRunner car : disjunctAutomataRunners) {
            car.reset();
        }
        activatorPointer = automaton.getActivator().getInitialState();
    }
    /**
     * @return nominal name of the automaton concatenated with the specific letters used
     */
    // NOTE(review): with an empty specificAlphabet this produces "Name)" (the
    // substring trick also strips the opening parenthesis) — confirm whether an
    // empty alphabet can occur before relying on this format.
    @Override
    public String toString() {
        StringBuffer a = new StringBuffer("(");
        for (char c : specificAlphabet) {
            a.append(c + ",");
        }
        return automaton.toString() + a.substring(0, a.length() - 1) + ")";
    }
}
| 6,561 | 31.166667 | 113 | java |
Janus | Janus-master/src/minerful/reactive/automaton/Utils.java | package minerful.reactive.automaton;
import dk.brics.automaton.*;
import java.util.*;
/**
 * Static factory methods building the parametric {@code dk.brics} automata
 * used by the reactive (Janus) constraint checkers: activators, (reversed)
 * futures and pasts of the supported LTLf-style operators.
 *
 * All builders take the "interesting" letter(s) plus the remaining alphabet
 * ({@code others}) and wire explicit, complete transition functions.
 */
public class Utils {

    /** Static-only utility class: not instantiable. */
    private Utils() {
    }

    /**
     * Adds to {@code from} one transition per character in {@code labels},
     * all leading to {@code to}. Factored out because every builder below
     * repeats this pattern.
     */
    private static void addTransitions(State from, State to, char[] labels) {
        for (char label : labels) {
            from.addTransition(new Transition(label, to));
        }
    }

    /** Wraps the given initial state into a new {@link Automaton}. */
    private static Automaton automatonFrom(State initialState) {
        Automaton result = new Automaton();
        result.setInitialState(initialState);
        return result;
    }

    /**
     * Returns the automaton accepting the reversed language of the input automaton.
     *
     * @param inAutomaton automaton to reverse
     * @param alphabet    alphabet used to complete the result
     * @return deterministic, complete automaton for the reversed language
     */
    public static Automaton getReversedAutomaton(Automaton inAutomaton, Set<Character> alphabet) {
        // The reversal of a DFA is in general an NFA.
        Automaton result = new Automaton();
        result.setDeterministic(false);
        State resInitialState = new State();
        resInitialState.setAccept(true);
        result.setInitialState(resInitialState);
        // SpecialOperations.reverse flips the transitions and returns the new initial states.
        Set<State> newNFAInitStates = SpecialOperations.reverse(inAutomaton);
        Collection<StatePair> initialSet = new LinkedList<>();
        for (State init : newNFAInitStates) {
            initialSet.add(new StatePair(resInitialState, init));
        }
        // Single NFA entry point: epsilon-connect it to all reversed initial states.
        result.addEpsilons(initialSet);
        BasicOperations.determinize(result);
        completeAutomaton(result, alphabet);
        return result;
    }

    /**
     * Completes the input automaton in place: after the call every state has an
     * outgoing transition for each character of the alphabet (missing ones are
     * routed to a fresh non-accepting sink).
     *
     * NOTE(review): this also adds the wildcard 'z' to the caller's alphabet set
     * (side effect on the parameter) — confirm callers rely on that before changing it.
     *
     * @param inAutomaton automaton to complete (mutated)
     * @param alphabet    alphabet of reference (mutated: 'z' is added)
     */
    public static void completeAutomaton(Automaton inAutomaton, Set<Character> alphabet) {
        // add default character to the alphabet
        alphabet.add('z'); // TODO make it parametric somewhere instead of hardcoding
        // Non-accepting sink looping on every letter.
        State sink = new State();
        for (char transition : alphabet) {
            sink.addTransition(new Transition(transition, sink));
        }
        for (State st : inAutomaton.getStates()) {
            Set<Character> stateTransitions = new TreeSet<>();
            for (Transition trans : st.getTransitions()) {
                stateTransitions.add(trans.getMin());
            }
            for (char transition : alphabet) {
                if (stateTransitions.contains(transition)) continue;
                st.addTransition(new Transition(transition, sink));
            }
        }
    }

    /**
     * Returns an automaton accepting only if the current transition is equal to
     * a specific activator character.
     *
     * @param activator parametric character representing the activator
     * @param others    all the parametric characters of the alphabet but the activator
     * @return activator automaton
     */
    public static Automaton getSingleCharActivatorAutomaton(char activator, char[] others) {
        State off = new State();            // last event was not the activator
        State on = new State();             // last event was the activator
        on.setAccept(true);
        off.addTransition(new Transition(activator, on));
        on.addTransition(new Transition(activator, on));
        addTransitions(off, off, others);
        addTransitions(on, off, others);
        return automatonFrom(off);
    }

    /**
     * Returns an automaton accepting only at the beginning of the trace
     * (empty prefix and first event).
     *
     * @param all all the parametric characters of the alphabet
     * @return activator automaton
     */
    public static Automaton getExistentialActivatorAutomaton(char[] all) {
        State start = new State();          // before any event
        start.setAccept(true);
        State first = new State();          // exactly one event consumed
        first.setAccept(true);
        State later = new State();          // two or more events consumed
        addTransitions(start, first, all);
        addTransitions(first, later, all);
        addTransitions(later, later, all);
        return automatonFrom(start);
    }

    /**
     * Returns an automaton accepting only if the current transition is contained
     * in a set of activator characters, e.g. for "A or B -> C" the constraint is
     * activated when the transition is A or B.
     *
     * @param activators all the parametric characters representing the possible activators
     * @param others     all the parametric characters of the alphabet but the activators
     * @return activator automaton
     */
    public static Automaton getMultiCharActivatorAutomaton(char[] activators, char[] others) {
        State off = new State();
        State on = new State();
        on.setAccept(true);
        addTransitions(off, on, activators);
        addTransitions(on, on, activators);
        addTransitions(off, off, others);
        addTransitions(on, off, others);
        return automatonFrom(off);
    }

    /**
     * Gets the automaton representing the {@code <>A} eventuality constraint.
     *
     * @param desired desired character
     * @param others  alphabet without the desired character
     * @return automaton for {@code <>desired}
     */
    public static Automaton getEventualityAutomaton(char desired, char[] others) {
        State pending = new State();        // desired not seen yet
        State satisfied = new State();      // desired seen: accept forever
        satisfied.setAccept(true);
        pending.addTransition(new Transition(desired, satisfied));
        addTransitions(pending, pending, others);
        satisfied.addTransition(new Transition(desired, satisfied));
        addTransitions(satisfied, satisfied, others);
        return automatonFrom(pending);
    }

    /**
     * Gets the automaton representing the {@code !<>A} negative eventuality constraint.
     *
     * @param desired desired character
     * @param others  alphabet without the desired character
     * @return automaton for {@code !<>desired}
     */
    public static Automaton getNegativeEventualityAutomaton(char desired, char[] others) {
        State violated = new State();       // desired occurred: reject forever
        State holding = new State();        // desired never occurred
        holding.setAccept(true);
        holding.addTransition(new Transition(desired, violated));
        addTransitions(holding, holding, others);
        violated.addTransition(new Transition(desired, violated));
        addTransitions(violated, violated, others);
        return automatonFrom(holding);
    }

    /**
     * Gets the automaton representing the {@code ()(!<>A)} constraint.
     *
     * @param notDesired character that must not occur after the first step
     * @param others     alphabet without the desired character
     * @return automaton for {@code ()(!<>notDesired)}
     */
    public static Automaton getNextNegativeEventualityAutomaton(char notDesired, char[] others) {
        State first = new State();          // the first event is always tolerated
        State sink = new State();           // notDesired occurred later: reject forever
        State holding = new State();
        holding.setAccept(true);
        first.addTransition(new Transition(notDesired, holding));
        addTransitions(first, holding, others);
        holding.addTransition(new Transition(notDesired, sink));
        addTransitions(holding, holding, others);
        sink.addTransition(new Transition(notDesired, sink));
        addTransitions(sink, sink, others);
        return automatonFrom(first);
    }

    /**
     * Gets the automaton representing the reverse of {@code ()(!<>A)}.
     *
     * @param notDesired character that must not occur
     * @param others     alphabet without the desired character
     * @return reversed automaton for {@code ()(!<>notDesired)}
     */
    public static Automaton getReversedNextNegativeEventualityAutomaton(char notDesired, char[] others) {
        State entry = new State();
        State sink = new State();
        State ok = new State();             // only other letters read so far
        ok.setAccept(true);
        State last = new State();           // notDesired read as last allowed position
        last.setAccept(true);
        entry.addTransition(new Transition(notDesired, last));
        addTransitions(entry, ok, others);
        ok.addTransition(new Transition(notDesired, last));
        addTransitions(ok, ok, others);
        last.addTransition(new Transition(notDesired, sink));
        addTransitions(last, sink, others);
        sink.addTransition(new Transition(notDesired, sink));
        addTransitions(sink, sink, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the reverse of {@code !A Until B}.
     *
     * @param notHold character to hold false until halt
     * @param halt    halting character
     * @param others  alphabet without the characters involved in the operation
     * @return reversed automaton for {@code !A Until B}
     */
    public static Automaton getReversedNegativeUntilAutomaton(char notHold, char halt, char[] others) {
        State waiting = new State();
        State accepted = new State();
        accepted.setAccept(true);
        waiting.addTransition(new Transition(halt, accepted));
        waiting.addTransition(new Transition(notHold, waiting));
        addTransitions(waiting, waiting, others);
        accepted.addTransition(new Transition(notHold, waiting));
        accepted.addTransition(new Transition(halt, accepted));
        addTransitions(accepted, accepted, others);
        return automatonFrom(waiting);
    }

    /**
     * Gets the automaton representing the {@code ()(!A Until B)} constraint.
     *
     * @param notHold character to hold false until halt
     * @param halt    halting character
     * @param others  alphabet without the characters involved in the operation
     * @return automaton for {@code ()(!A Until B)}
     */
    public static Automaton getNextNegativeUntilAutomaton(char notHold, char halt, char[] others) {
        State entry = new State();          // first event always tolerated
        State waiting = new State();        // waiting for halt, notHold forbidden
        State sink = new State();           // notHold seen before halt: reject forever
        State done = new State();           // halt reached: accept forever
        done.setAccept(true);
        entry.addTransition(new Transition(halt, waiting));
        entry.addTransition(new Transition(notHold, waiting));
        addTransitions(entry, waiting, others);
        waiting.addTransition(new Transition(halt, done));
        waiting.addTransition(new Transition(notHold, sink));
        addTransitions(waiting, waiting, others);
        sink.addTransition(new Transition(halt, sink));
        sink.addTransition(new Transition(notHold, sink));
        addTransitions(sink, sink, others);
        done.addTransition(new Transition(halt, done));
        done.addTransition(new Transition(notHold, done));
        addTransitions(done, done, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the reverse of {@code ()(!A Until B)}.
     *
     * @param notHold character to hold false until halt
     * @param halt    halting character
     * @param others  alphabet without the characters involved in the operation
     * @return reversed automaton for {@code ()(!A Until B)}
     */
    public static Automaton getReversedNextNegativeUntilAutomaton(char notHold, char halt, char[] others) {
        State entry = new State();
        State afterHalt = new State();
        State okB = new State();
        okB.setAccept(true);
        State okA = new State();
        okA.setAccept(true);
        entry.addTransition(new Transition(halt, afterHalt));
        entry.addTransition(new Transition(notHold, entry));
        addTransitions(entry, entry, others);
        afterHalt.addTransition(new Transition(halt, okB));
        afterHalt.addTransition(new Transition(notHold, okA));
        addTransitions(afterHalt, okB, others);
        okB.addTransition(new Transition(halt, okB));
        okB.addTransition(new Transition(notHold, okA));
        addTransitions(okB, okB, others);
        okA.addTransition(new Transition(halt, afterHalt));
        okA.addTransition(new Transition(notHold, entry));
        addTransitions(okA, entry, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the {@code ()A} constraint.
     *
     * @param desired character expected at the next position
     * @param others  alphabet without the desired character
     * @return automaton for {@code ()desired}
     */
    public static Automaton getNextAutomaton(char desired, char[] others) {
        State entry = new State();          // first event always tolerated
        State middle = new State();         // second event decides
        State sink = new State();           // wrong second event: reject forever
        State done = new State();           // desired at the second position: accept forever
        done.setAccept(true);
        entry.addTransition(new Transition(desired, middle));
        addTransitions(entry, middle, others);
        middle.addTransition(new Transition(desired, done));
        addTransitions(middle, sink, others);
        done.addTransition(new Transition(desired, done));
        addTransitions(done, done, others);
        sink.addTransition(new Transition(desired, sink));
        addTransitions(sink, sink, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the reverse of {@code ()A}.
     *
     * @param desired character
     * @param others  alphabet without the desired character
     * @return reversed automaton for {@code ()desired}
     */
    public static Automaton getReversedNextAutomaton(char desired, char[] others) {
        State entry = new State();
        State middle = new State();
        State okB = new State();
        okB.setAccept(true);
        State okC = new State();
        okC.setAccept(true);
        entry.addTransition(new Transition(desired, middle));
        addTransitions(entry, entry, others);
        middle.addTransition(new Transition(desired, okB));
        addTransitions(middle, okC, others);
        okB.addTransition(new Transition(desired, okB));
        addTransitions(okB, okC, others);
        okC.addTransition(new Transition(desired, middle));
        addTransitions(okC, entry, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the {@code !()A} constraint.
     *
     * @param undesired character that must not occur at the next position
     * @param others    alphabet without the undesired character
     * @return automaton for {@code !()undesired}
     */
    public static Automaton getNegativeNextAutomaton(char undesired, char[] others) {
        State entry = new State();          // first event always tolerated
        State middle = new State();         // second event decides
        State done = new State();           // second event not undesired: accept forever
        done.setAccept(true);
        State sink = new State();           // undesired at the second position: reject forever
        entry.addTransition(new Transition(undesired, middle));
        addTransitions(entry, middle, others);
        middle.addTransition(new Transition(undesired, sink));
        addTransitions(middle, done, others);
        sink.addTransition(new Transition(undesired, sink));
        addTransitions(sink, sink, others);
        done.addTransition(new Transition(undesired, done));
        addTransitions(done, done, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the reverse of {@code !()A}.
     *
     * @param undesired character
     * @param others    alphabet without the undesired character
     * @return reversed automaton for {@code !()undesired}
     */
    public static Automaton getNegativeReversedNextAutomaton(char undesired, char[] others) {
        State entry = new State();
        entry.setAccept(true);
        State middle = new State();
        middle.setAccept(true);
        State badB = new State();
        State badC = new State();
        entry.addTransition(new Transition(undesired, middle));
        addTransitions(entry, entry, others);
        middle.addTransition(new Transition(undesired, badB));
        addTransitions(middle, badC, others);
        badB.addTransition(new Transition(undesired, badB));
        addTransitions(badB, badC, others);
        badC.addTransition(new Transition(undesired, middle));
        addTransitions(badC, entry, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the "ends with A" constraint: accepts
     * exactly when the most recent event is the desired one.
     *
     * @param desired desired character
     * @param others  alphabet without the desired character
     * @return automaton accepting traces whose last event is {@code desired}
     */
    public static Automaton getLastAutomaton(char desired, char[] others) {
        State off = new State();
        State on = new State();
        on.setAccept(true);
        off.addTransition(new Transition(desired, on));
        addTransitions(off, off, others);
        on.addTransition(new Transition(desired, on));
        addTransitions(on, off, others);
        return automatonFrom(off);
    }

    /**
     * Gets the automaton representing the "starts with A" constraint: the very
     * first event decides acceptance once and for all.
     *
     * @param desired desired character
     * @param others  alphabet without the desired character
     * @return automaton accepting traces whose first event is {@code desired}
     */
    public static Automaton getFirstAutomaton(char desired, char[] others) {
        State entry = new State();
        State rejected = new State();       // wrong first event: reject forever
        State accepted = new State();       // desired first event: accept forever
        accepted.setAccept(true);
        entry.addTransition(new Transition(desired, accepted));
        addTransitions(entry, rejected, others);
        rejected.addTransition(new Transition(desired, rejected));
        addTransitions(rejected, rejected, others);
        accepted.addTransition(new Transition(desired, accepted));
        addTransitions(accepted, accepted, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton representing the "exactly N occurrences of A" constraint.
     *
     * NOTE(review): behaviour for n &lt;= 0 is "accept zero occurrences" by
     * construction — confirm whether negative n should be rejected explicitly.
     *
     * @param desired desired character
     * @param others  alphabet without the desired character
     * @param n       precise number of occurrences desired
     * @return automaton for "exactly n occurrences of desired"
     */
    public static Automaton getPreciseParticipationAutomaton(char desired, char[] others, int n) {
        State entry = new State();
        State sink = new State();           // (n+1)-th occurrence: reject forever
        // Build a chain of n states, each advanced by one occurrence of desired.
        State current = entry;
        for (int i = 0; i < n; i++) {
            addTransitions(current, current, others);
            State next = new State();
            current.addTransition(new Transition(desired, next));
            current = next;
        }
        current.setAccept(true);            // exactly n occurrences counted
        current.addTransition(new Transition(desired, sink));
        addTransitions(current, current, others);
        sink.addTransition(new Transition(desired, sink));
        addTransitions(sink, sink, others);
        return automatonFrom(entry);
    }

    /**
     * Gets the automaton checking whether the current transition equals the
     * desired letter, i.e., it checks the present.
     *
     * @param desired desired character
     * @param others  alphabet without the desired character
     * @return automaton accepting exactly when the last event is {@code desired}
     */
    public static Automaton getPresentAutomaton(char desired, char[] others) {
        State off = new State();
        State on = new State();
        on.setAccept(true);
        off.addTransition(new Transition(desired, on));
        on.addTransition(new Transition(desired, on));
        addTransitions(off, off, others);
        addTransitions(on, off, others);
        return automatonFrom(off);
    }
}
| 28,147 | 40.333333 | 132 | java |
Janus | Janus-master/src/minerful/reactive/dfg/DFG.java | package minerful.reactive.dfg;
import minerful.concept.AbstractTaskClass;
import minerful.concept.TaskClass;
import minerful.logparser.LogTraceParser;
import minerful.logparser.XesEventParser;
import minerful.logparser.XesLogParser;
import minerful.logparser.XesTraceParser;
import minerful.reactive.variant.DFGEncodedLog;
import minerful.reactive.variant.DFGEncodedEvent;
import org.deckfour.xes.model.XAttributeTimestamp;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* Class representing a DFG (Directly Follow Graph), i.e., storing al the direct transitions in an event log along with their times and counters
*/
public class DFG {
Map<TaskClass, DFGNode> tasks;
    /**
     * Constructor. Builds an empty DFG with no task nodes.
     */
    public DFG() {
        this.tasks = new HashMap<>();
    }
    /**
     * Returns the map of task nodes contained in the DFG.
     *
     * @return map from each task class to its node in the graph
     */
    public Map<TaskClass, DFGNode> getTasks() {
        return tasks;
    }
/**
* Computes for all the transitions in the DFG the average time required by the transition
*/
public void computeAllAverages() {
for (DFGNode node : tasks.values()) {
for (DFGTransition tr : node.outgoingTransitions.values()) {
tr.computeAverage();
}
}
}
/**
* Create a transition between two nodes or Update it if already existing
*
* @param previousNode
* @param destinationNode
* @param timeDiff
*/
public void addTransition(DFGNode previousNode, DFGNode destinationNode, long timeDiff) {
this.tasks.putIfAbsent(previousNode.task, previousNode);
this.tasks.putIfAbsent(destinationNode.task, destinationNode);
previousNode.addTransition(destinationNode, timeDiff);
}
/**
* Retrun the transition between two nodes if existing in the DFG, NULL otherwise.
*
* @param source
* @param destination
* @return
*/
public DFGTransition getTransition(TaskClass source, TaskClass destination) {
try {
return tasks.get(source).outgoingTransitions.get(new DFGNode(destination));
} catch (NullPointerException E) {
return null;
}
}
    /**
     * Builds the DFG of the given encoded event log.
     * Convenience overload delegating to {@link #buildDFGFromEncodedLog(List)}.
     *
     * @param eLog encoded event log whose traces are read
     * @return the DFG of the log
     */
    public static DFG buildDFGFromEncodedLog(DFGEncodedLog eLog) {
        return buildDFGFromEncodedLog(eLog.traces);
    }
public static DFG buildDFGFromEncodedLog(List<List<DFGEncodedEvent>> traces) {
DFG result = new DFG();
for (List<DFGEncodedEvent> trace : traces) {
DFGEncodedEvent previous = trace.get(0);
DFGNode previousNode = result.tasks.getOrDefault(previous.eventsSequence, new DFGNode(previous.eventsSequence));
DFGNode currentNode;
boolean flag = true;
for (DFGEncodedEvent current : trace) {
if (flag) {
// skip first event
flag = false;
continue;
}
currentNode = result.tasks.getOrDefault(current.eventsSequence, new DFGNode(current.eventsSequence));
long timeDiff = Math.abs(current.timesSequence - previous.timesSequence);
result.addTransition(previousNode, currentNode, timeDiff);
previous = current;
previousNode = currentNode;
}
}
result.computeAllAverages();
return result;
}
/**
* Return the DFG of a given XES event log. It is expected that the timestamp is stored in the attribute "time:timestamp"
*
* @param eventLogParser
* @return
*/
public static DFG buildDFGFromXesLogParser(XesLogParser eventLogParser) {
DFG result = new DFG();
for (Iterator<LogTraceParser> logIterator = eventLogParser.traceIterator(); logIterator.hasNext(); ) {
XesTraceParser traceParser = (XesTraceParser) logIterator.next();
traceParser.init(); // otherwise, if the trace was already read, the iterator is pointing to the end of the trace
XesEventParser previous = (XesEventParser) traceParser.parseSubsequent();
AbstractTaskClass previousTaskClass = previous.getEvent().getTaskClass();
DFGNode previousNode = result.tasks.getOrDefault(previousTaskClass, new DFGNode(previousTaskClass));
DFGNode currentNode;
AbstractTaskClass currentTaskClass;
XesEventParser current;
Date prevDate;
Date currDate;
long timeDiff;
while (!traceParser.isParsingOver()) {
current = (XesEventParser) traceParser.parseSubsequent();
currentTaskClass = current.getEvent().getTaskClass();
currentNode = result.tasks.getOrDefault(currentTaskClass, new DFGNode(currentTaskClass));
prevDate = ((XAttributeTimestamp) previous.xesEvent.getAttributes().get("time:timestamp")).getValue();
currDate = ((XAttributeTimestamp) current.xesEvent.getAttributes().get("time:timestamp")).getValue();
timeDiff = TimeUnit.SECONDS.convert(Math.abs(currDate.getTime() - prevDate.getTime()), TimeUnit.MILLISECONDS);
result.addTransition(previousNode, currentNode, timeDiff);
previous = current;
previousNode = currentNode;
}
}
result.computeAllAverages();
return result;
}
/**
* Build a Graphviz graphical representation of the DFG and save it.
*
* @param outputPath
*/
public void toDot(String outputPath) {
System.out.println("Not yet Implemented");
}
@Override
public String toString() {
StringBuilder result = new StringBuilder();
result.append("DFG:\n");
for (DFGNode task : tasks.values()) {
result.append("\t " + task.task + "\n");
for (DFGTransition tran : task.outgoingTransitions.values()) {
result.append("\t\t" + tran + "\n");
}
}
return result.toString();
}
} | 6,109 | 33.715909 | 144 | java |
Janus | Janus-master/src/minerful/reactive/dfg/DFGNode.java | package minerful.reactive.dfg;
import minerful.concept.TaskClass;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* Node of a DFG. It contains a task from the event log and all the transition from this task
*/
public class DFGNode {
TaskClass task;
Map<DFGNode, DFGTransition> outgoingTransitions; // destination node-> transition from this node to the destination
/**
* Constructor. Build a node for the given task with no transitions.
*
* @param task
*/
public DFGNode(TaskClass task) {
this.task = task;
this.outgoingTransitions = new HashMap<>();
}
/**
* Create or update a transition from this node to the destination one with a certain time.
*
* @param destination
* @param timeDifference
*/
public void addTransition(DFGNode destination, long timeDifference) {
// IF destination already exists, update existing transition
if (outgoingTransitions.containsKey(destination)) {
outgoingTransitions.get(destination).update(timeDifference);
}
// ELSE create a new transition
else {
DFGTransition t = new DFGTransition(this, destination, timeDifference);
this.outgoingTransitions.put(destination, t);
}
}
@Override
public String toString() {
return task.getName();
}
// Equivalence performed only over the task, not the transitions
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DFGNode dfgNode = (DFGNode) o;
return Objects.equals(task, dfgNode.task);
}
@Override
public int hashCode() {
return Objects.hash(task);
}
} | 1,809 | 28.193548 | 119 | java |
Janus | Janus-master/src/minerful/reactive/dfg/DFGTransition.java | package minerful.reactive.dfg;
/**
* Transition of a DFG from a node to another with its time stats.
* It stores:
* - the number of time the transition has been traversed in the event log
* - the average time required for the transition
* - the maximum/minimum time taken by this transition in the event log
*/
public class DFGTransition {
    DFGNode source;
    DFGNode destination;

    float timeAvg; // average time taken by this transition in the event log
    long timeSum;  // sum of the total time taken by this transition in the event log
    long timeMin;  // minimum time taken by this transition in the event log
    long timeMax;  // maximum time taken by this transition in the event log
    int counter;   // number of occurrences of this transition in the event log

    /**
     * Builds a transition between the given nodes, initialized with one
     * occurrence of the given duration.
     *
     * @param source      source node
     * @param destination destination node
     * @param timeDiff    duration of the first observed occurrence
     */
    public DFGTransition(DFGNode source, DFGNode destination, long timeDiff) {
        this.source = source;
        this.destination = destination;
        this.counter = 1;
        this.timeSum = timeDiff;
        this.timeMin = timeDiff;
        this.timeMax = timeDiff;
    }

    public float getTimeAvg() {
        return timeAvg;
    }

    public long getTimeSum() {
        return timeSum;
    }

    public long getTimeMin() {
        return timeMin;
    }

    public long getTimeMax() {
        return timeMax;
    }

    public int getCounter() {
        return counter;
    }

    /**
     * Computes the current average transition time and returns it.
     *
     * @return the (re)computed average time
     */
    public float computeAverage() {
        this.timeAvg = (float) this.timeSum / this.counter;
        return this.timeAvg;
    }

    /**
     * Updates the counters with a new occurrence of this transition.
     * Note: the average is NOT recomputed here.
     *
     * @param newTimeDifference duration of the new occurrence
     */
    public void update(long newTimeDifference) {
        this.timeSum += newTimeDifference;
        if (newTimeDifference < this.timeMin) {
            this.timeMin = newTimeDifference;
        }
        if (newTimeDifference > this.timeMax) {
            this.timeMax = newTimeDifference;
        }
        this.counter++;
    }

    /**
     * Updates the counters with a new occurrence and immediately recomputes the average.
     *
     * @param newTimeDifference duration of the new occurrence
     */
    public void updateWithAverage(long newTimeDifference) {
        update(newTimeDifference);
        computeAverage();
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append('[').append(source).append("]-->[").append(destination).append("]{")
                .append("avg=").append(timeAvg)
                .append(", sum=").append(timeSum)
                .append(", min=").append(timeMin)
                .append(", max=").append(timeMax)
                .append(", counter=").append(counter)
                .append('}');
        return text.toString();
    }
}
| 2,795 | 26.683168 | 108 | java |
Janus | Janus-master/src/minerful/reactive/io/JanusDFGVariantOutputManagementLauncher.java | package minerful.reactive.io;
import minerful.MinerFulOutputManagementLauncher;
import minerful.params.SystemCmdParameters;
import minerful.reactive.params.JanusDFGVariantCmdParameters;
import minerful.reactive.params.JanusPrintParameters;
import minerful.reactive.variant.DFGPermutationResult;
import minerful.reactive.variant.DFGtimesVariantAnalysisCore;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
/**
* Class to handle the output of Janus
*/
public class JanusDFGVariantOutputManagementLauncher extends MinerFulOutputManagementLauncher {
/**
* reads the terminal input parameters and launch the proper output functions
*
* @param varParams
* @param janusViewParams
* @param systemParams
*/
public void manageVariantOutput(
List<DFGPermutationResult> variantResults,
JanusDFGVariantCmdParameters varParams,
JanusPrintParameters janusViewParams,
SystemCmdParameters systemParams) {
manageVariantOutput(variantResults, varParams, janusViewParams, systemParams, "LOG1VALUE", "LOG2VALUE");
}
/**
* reads the terminal input parameters and launch the proper output functions
*
* @param varParams
* @param janusViewParams
* @param systemParams
*/
public void manageVariantOutput(
List<DFGPermutationResult> variantResults,
JanusDFGVariantCmdParameters varParams,
JanusPrintParameters janusViewParams,
SystemCmdParameters systemParams,
String log1Name,
String log2Name) {
File outputFile = null;
// ************* CSV
if (varParams.outputCvsFile != null) {
outputFile = retrieveFile(varParams.outputCvsFile);
logger.info("Saving variant analysis result as CSV in " + outputFile + "...");
double before = System.currentTimeMillis();
exportVariantResultsToCSV(variantResults, outputFile, log1Name, log2Name);
double after = System.currentTimeMillis();
logger.info("Total CSV serialization time: " + (after - before));
}
if (janusViewParams != null && !janusViewParams.suppressResultsPrintOut) {
printVariantResultsToScreen(variantResults);
}
// ************* JSON
if (varParams.outputJsonFile != null) {
outputFile = retrieveFile(varParams.outputJsonFile);
logger.info("Saving variant analysis result as JSON in " + outputFile + "...");
double before = System.currentTimeMillis();
// TODO
logger.info("JSON output yet not implemented");
double after = System.currentTimeMillis();
logger.info("Total JSON serialization time: " + (after - before));
}
}
private void printVariantResultsToScreen(List<DFGPermutationResult> variantResults) {
// header row
System.out.println("--------------------");
System.out.println("relevant transitions time differences: " + variantResults.size());
System.out.println();
for (DFGPermutationResult r : variantResults) {
System.out.println(r.toString());
}
System.out.println();
}
private void exportVariantResultsToCSV(List<DFGPermutationResult> variantResults, File outputFile, String log1Name, String log2Name) {
// header row
try {
String[] headerDetailed = {"FROM", "TO", "PERSPECTIVE", "pVALUE", "DIFFERENCE", log1Name, log2Name};
FileWriter fwDetailed = new FileWriter(outputFile);
CSVPrinter printerDetailed = new CSVPrinter(fwDetailed, CSVFormat.DEFAULT.withHeader(headerDetailed).withDelimiter(';'));
for (DFGPermutationResult r : variantResults) {
printerDetailed.printRecord(new String[]{
r.sourceNode,
r.destinationNode,
r.kind,
String.format("%.3f", r.pValue),
String.format("%.3f", r.diff / 60), // minutes
String.format("%.3f", r.log1Value / 60), // minutes
String.format("%.3f", r.log2Value / 60) // minutes
});
}
fwDetailed.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
| 4,538 | 35.604839 | 138 | java |
Janus | Janus-master/src/minerful/reactive/io/JanusMeasurementsOutputManagementLauncher.java | package minerful.reactive.io;
import com.google.gson.*;
import minerful.MinerFulOutputManagementLauncher;
import minerful.concept.TaskCharArchive;
import minerful.io.params.OutputModelParameters;
import minerful.logparser.LogTraceParser;
import minerful.params.SystemCmdParameters;
import minerful.reactive.automaton.SeparatedAutomatonOfflineRunner;
import minerful.reactive.measurements.Measures;
import minerful.reactive.measurements.MegaMatrixMonster;
import minerful.reactive.params.JanusMeasurementsCmdParameters;
import minerful.reactive.params.JanusMeasurementsCmdParameters.DetailLevel;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import java.io.*;
import java.util.*;
/**
* Class to handle the output of Janus
*/
public class JanusMeasurementsOutputManagementLauncher extends MinerFulOutputManagementLauncher {
/**
* reads the terminal input parameters and launch the proper output functions
*
* @param matrix
* @param outParams
* @param janusViewParams
* @param systemParams
* @param alphabet
*/
public void manageMeasurementsOutput(MegaMatrixMonster matrix,
                                     JanusPrintParameters janusViewParams,
                                     OutputModelParameters outParams,
                                     SystemCmdParameters systemParams,
                                     JanusMeasurementsCmdParameters measurementsParams,
                                     TaskCharArchive alphabet) {
    String baseOutputPath;
    File outputFile;
    System.gc();
    // ************* CSV
    if (outParams.fileToSaveConstraintsAsCSV != null) {
        baseOutputPath = basePathWithoutExtension(outParams.fileToSaveConstraintsAsCSV, ".csv");
        logger.info("Saving the measures as CSV in " + baseOutputPath + "...");
        double before = System.currentTimeMillis();
        // Events evaluation
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.event, DetailLevel.all)) {
            logger.info("Events Evaluation...");
            outputFile = new File(baseOutputPath.concat("[eventsEvaluation].csv"));
            // the lite matrix is used when the full events-evaluation matrix was not kept in memory
            if (matrix.getEventsEvaluationMatrixLite() == null) {
                exportEventsEvaluationToCSV(matrix, outputFile, outParams.encodeOutputTasks, alphabet);
            } else {
                exportEventsEvaluationLiteToCSV(matrix, outputFile, outParams.encodeOutputTasks, alphabet);
            }
        }
        // Trace Measures
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.trace, DetailLevel.allTrace, DetailLevel.all)) {
            logger.info("Traces Measures...");
            outputFile = new File(baseOutputPath.concat("[tracesMeasures].csv"));
            exportTracesMeasuresToCSV(matrix, outputFile, measurementsParams, outParams.encodeOutputTasks, alphabet);
        }
        // Trace Measures descriptive statistics
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.traceStats, DetailLevel.allTrace, DetailLevel.allLog, DetailLevel.all)) {
            logger.info("Traces Measures Stats...");
            outputFile = new File(baseOutputPath.concat("[tracesMeasuresStats].csv"));
            exportTracesMeasuresStatisticsToCSV(matrix, outputFile, measurementsParams, outParams.encodeOutputTasks, alphabet);
        }
        // Log Measures
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.log, DetailLevel.allLog, DetailLevel.all)) {
            logger.info("Log Measures...");
            outputFile = new File(baseOutputPath.concat("[logMeasures].csv"));
            exportLogMeasuresToCSV(matrix, outputFile, measurementsParams, outParams.encodeOutputTasks, alphabet);
        }
        double after = System.currentTimeMillis();
        logger.info("Total CSV serialization time: " + (after - before));
    }
    if (!janusViewParams.suppressResultsPrintOut) {
        // TODO print result in terminal
        logger.info("Terminal output yet not implemented");
    }
    if (outParams.fileToSaveAsXML != null) {
        // TODO XML output
        logger.info("XML output yet not implemented");
    }
    // ************* JSON
    if (outParams.fileToSaveAsJSON != null) {
        baseOutputPath = basePathWithoutExtension(outParams.fileToSaveAsJSON, ".json");
        logger.info("Saving the measures as JSON in " + baseOutputPath + "...");
        double before = System.currentTimeMillis();
        // Events evaluation
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.event, DetailLevel.all)) {
            logger.info("Events Evaluation...");
            outputFile = new File(baseOutputPath.concat("[eventsEvaluation].json"));
            if (matrix.getEventsEvaluationMatrixLite() == null) {
                exportEventsEvaluationToJson(matrix, outputFile, outParams.encodeOutputTasks, alphabet);
            } else {
                exportEventsEvaluationLiteToJson(matrix, outputFile, outParams.encodeOutputTasks, alphabet);
            }
        }
        // Trace Measures
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.trace, DetailLevel.allTrace, DetailLevel.all)) {
            logger.info("Traces Measures...");
            outputFile = new File(baseOutputPath.concat("[tracesMeasures].json"));
            exportTracesMeasuresToJson(matrix, outputFile, measurementsParams, outParams.encodeOutputTasks, alphabet);
        }
        // Trace Measures descriptive statistics
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.traceStats, DetailLevel.allTrace, DetailLevel.allLog, DetailLevel.all)) {
            logger.info("Traces Measures Stats...");
            outputFile = new File(baseOutputPath.concat("[tracesMeasuresStats].json"));
            exportTracesMeasuresStatisticsToJson(matrix, outputFile, measurementsParams, outParams.encodeOutputTasks, alphabet);
        }
        // Log Measures
        if (isDetailLevelAmong(measurementsParams.detailsLevel, DetailLevel.log, DetailLevel.allLog, DetailLevel.all)) {
            logger.info("Log Measures...");
            outputFile = new File(baseOutputPath.concat("[logMeasures].json"));
            exportLogMeasuresToJson(matrix, outputFile, measurementsParams, outParams.encodeOutputTasks, alphabet);
        }
        double after = System.currentTimeMillis();
        logger.info("Total JSON serialization time: " + (after - before));
    }
    logger.info("Output encoding: " + outParams.encodeOutputTasks);
}

/**
 * @param actual   the detail level selected by the user
 * @param accepted the detail levels enabling a given output
 * @return true iff {@code actual} equals one of the {@code accepted} levels
 */
private static boolean isDetailLevelAmong(DetailLevel actual, DetailLevel... accepted) {
    for (DetailLevel level : accepted) {
        if (actual.equals(level)) {
            return true;
        }
    }
    return false;
}

/**
 * Strips the given extension from the absolute path of {@code file}.
 * Fix: the original {@code substring(0, indexOf(ext))} threw a
 * StringIndexOutOfBoundsException when the extension was absent; the full
 * path is now returned unchanged in that case.
 *
 * @param file      output file whose base path is needed
 * @param extension extension (e.g. ".csv") to strip
 * @return the absolute path without the trailing extension
 */
private static String basePathWithoutExtension(File file, String extension) {
    String path = file.getAbsolutePath();
    int cut = path.lastIndexOf(extension);
    return (cut >= 0) ? path.substring(0, cut) : path;
}
/**
* Export to CSV the events evaluation.
* The output contains the events evaluation for each traces of each constraint (including the entire model) the evaluation
*
* @param megaMatrix events evaluation matrix
* @param outputFile CSV output file base
* @param encodeOutputTasks if true, the events are encoded, decoded otherwise
* @param alphabet alphabet to decode the events
*/
public void exportEventsEvaluationToCSV(MegaMatrixMonster megaMatrix, File outputFile, boolean encodeOutputTasks, TaskCharArchive alphabet) {
    logger.debug("CSV events serialization...");
    // header row
    String[] header = {"Trace", "Constraint", "Events-Evaluation"};
    // Fix: try-with-resources closes the writer even when a record fails to be
    // written (the original code skipped fw.close() on exception)
    try (FileWriter fw = new FileWriter(outputFile);
         CSVPrinter printer = new CSVPrinter(fw, CSVFormat.DEFAULT.withHeader(header).withDelimiter(';'))) {
        byte[][][] matrix = megaMatrix.getEventsEvaluationMatrix();
        Iterator<LogTraceParser> it = megaMatrix.getLog().traceIterator();
        List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
        // one row per (trace, constraint) pair, across the entire log
        for (int trace = 0; trace < matrix.length; trace++) {
            LogTraceParser tr = it.next();
            String traceString = encodeOutputTasks ? tr.encodeTrace() : tr.printStringTrace();
            for (int constraint = 0; constraint < matrix[trace].length; constraint++) {
                // the last "constraint" entry stands for the whole model
                String constraintName;
                if (constraint == matrix[trace].length - 1) {
                    constraintName = "MODEL";
                } else if (encodeOutputTasks) {
                    constraintName = automata.get(constraint).toString();
                } else {
                    constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
                }
                printer.printRecord(traceString, constraintName, Arrays.toString(matrix[trace][constraint]));
            }
        }
    } catch (IOException e) {
        logger.error("CSV events serialization failed", e);
    }
}
/**
* Export to CSV the detailed result at the level of the events in all the traces.
*
* @param megaMatrix
* @param outputFile
* @param encodeOutputTasks
* @param alphabet
*/
public void exportEventsEvaluationLiteToCSV(MegaMatrixMonster megaMatrix, File outputFile, boolean encodeOutputTasks, TaskCharArchive alphabet) {
    logger.debug("CSV events LITE serialization...");
    // header row
    // TODO make the columns parametric, not hard-coded
    // ("Lenght" typo kept on purpose: downstream consumers may rely on it)
    String[] header = {
            "Trace",
            "Constraint",
            "N(A)",
            "N(T)",
            "N(¬A)",
            "N(¬T)",
            "N(¬A¬T)",
            "N(¬AT)",
            "N(A¬T)",
            "N(AT)",
            "Lenght"
    };
    // Fix: try-with-resources closes the writer even when a record fails to be
    // written (the original code skipped fw.close() on exception)
    try (FileWriter fw = new FileWriter(outputFile);
         CSVPrinter printer = new CSVPrinter(fw, CSVFormat.DEFAULT.withHeader(header).withDelimiter(';'))) {
        int[][][] matrix = megaMatrix.getEventsEvaluationMatrixLite();
        Iterator<LogTraceParser> it = megaMatrix.getLog().traceIterator();
        List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
        // one row per (trace, constraint) pair, across the entire log
        for (int trace = 0; trace < matrix.length; trace++) {
            LogTraceParser tr = it.next();
            String traceString = encodeOutputTasks ? tr.encodeTrace() : tr.printStringTrace();
            for (int constraint = 0; constraint < matrix[trace].length; constraint++) {
                // the last "constraint" entry stands for the whole model
                String constraintName;
                if (constraint == matrix[trace].length - 1) {
                    constraintName = "MODEL";
                } else if (encodeOutputTasks) {
                    constraintName = automata.get(constraint).toString();
                } else {
                    constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
                }
                // 2 label columns + all the counters of this (trace, constraint) cell
                // (loop replaces the 9 hard-coded indices of the original code)
                List<String> row = new ArrayList<>(2 + matrix[trace][constraint].length);
                row.add(traceString);
                row.add(constraintName);
                for (int counter : matrix[trace][constraint]) {
                    row.add(String.valueOf(counter));
                }
                printer.printRecord(row);
            }
        }
    } catch (IOException e) {
        logger.error("CSV events LITE serialization failed", e);
    }
}
/**
 * Export to CSV the trace measures.
 * The output contains, for each trace and each constraint (including the entire model), the measure values.
 *
 * @param megaMatrix matrix holding the computed trace measures
 * @param outputFile CSV output file base
 * @param measurementsParams measurement command-line parameters (selected measure)
 * @param encodeOutputTasks if true, the tasks are encoded, decoded otherwise
 * @param alphabet alphabet to decode the tasks
 */
public void exportTracesMeasuresToCSV(MegaMatrixMonster megaMatrix, File outputFile, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
    logger.debug("CSV trace measures serialization...");
    // header row: either all the known measures or just the single requested one
    String[] header;
    if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
        header = ArrayUtils.addAll(new String[]{"Trace", "Constraint"}, Measures.MEASURE_NAMES);
    } else {
        header = new String[]{"Trace", "Constraint", measurementsParams.measure};
    }
    // Fix: try-with-resources closes the writer even when a record fails to be
    // written (the original code skipped fw.close() on exception)
    try (FileWriter fw = new FileWriter(outputFile);
         CSVPrinter printer = new CSVPrinter(fw, CSVFormat.DEFAULT.withHeader(header).withDelimiter(';'))) {
        int contraintsNum = megaMatrix.getConstraintsNumber();
        Iterator<LogTraceParser> it = megaMatrix.getLog().traceIterator();
        List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
        // one row per (trace, constraint) pair, across the entire log
        for (int trace = 0; trace < megaMatrix.getLog().wholeLength(); trace++) {
            LogTraceParser tr = it.next();
            String traceString = encodeOutputTasks ? tr.encodeTrace() : tr.printStringTrace();
            for (int constraint = 0; constraint < contraintsNum; constraint++) {
                String[] measurements;
                if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
                    measurements = new String[Measures.MEASURE_NUM];
                    for (int measureIndex = 0; measureIndex < Measures.MEASURE_NUM; measureIndex++) {
                        measurements[measureIndex] = String.valueOf(megaMatrix.getSpecificMeasure(trace, constraint, measureIndex));
                    }
                } else {
                    measurements = new String[]{String.valueOf(megaMatrix.getTraceMeasuresMatrix()[trace][constraint][0])};
                }
                // the last "constraint" entry stands for the whole model
                String constraintName;
                if (constraint == contraintsNum - 1) {
                    constraintName = "MODEL";
                } else if (encodeOutputTasks) {
                    constraintName = automata.get(constraint).toString();
                } else {
                    constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
                }
                String[] row = ArrayUtils.addAll(new String[]{traceString, constraintName}, measurements);
                printer.printRecord(row);
            }
        }
    } catch (IOException e) {
        logger.error("CSV trace measures serialization failed", e);
    }
}
/**
* Export to CSV format the aggregated measures at the level of log.
* <p>
* the columns index is:
* constraint; quality-measure; duck-tape; mean; geometric-mean; variance; ....(all the other stats)
*
* @param megaMatrix
* @param outputAggregatedMeasuresFile
* @param measurementsParams
* @param encodeOutputTasks
* @param alphabet
*/
public void exportTracesMeasuresStatisticsToCSV(MegaMatrixMonster megaMatrix, File outputAggregatedMeasuresFile, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
    logger.debug("CSV aggregated measures...");
    SummaryStatistics[][] constraintsLogMeasure = megaMatrix.getTraceMeasuresDescriptiveStatistics();
    List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
    // header row
    // TODO make the columns parametric, not hard-coded
    String[] header = {
            "Constraint",
            "Quality-Measure",
            "Mean",
            "Geometric-Mean",
            "Variance",
            "Population-variance",
            "Standard-Deviation",
            "Max",
            "Min"
    };
    // Fix: try-with-resources closes the writer even when a record fails to be
    // written (the original code skipped fw.close() on exception)
    try (FileWriter fw = new FileWriter(outputAggregatedMeasuresFile);
         CSVPrinter printer = new CSVPrinter(fw, CSVFormat.DEFAULT.withHeader(header).withDelimiter(';'))) {
        for (int constraint = 0; constraint < constraintsLogMeasure.length; constraint++) {
            // the last "constraint" entry stands for the whole model
            String constraintName;
            if (constraint == constraintsLogMeasure.length - 1) {
                constraintName = "MODEL";
            } else if (encodeOutputTasks) {
                constraintName = automata.get(constraint).toString();
            } else {
                constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
            }
            SummaryStatistics[] constraintLogMeasure = constraintsLogMeasure[constraint];
            if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
                for (int measureIndex = 0; measureIndex < megaMatrix.getMeasureNames().length; measureIndex++) {
                    printer.printRecord(traceStatsCsvRow(constraintName, megaMatrix.getMeasureName(measureIndex), constraintLogMeasure[measureIndex]));
                }
            } else {
                printer.printRecord(traceStatsCsvRow(constraintName, measurementsParams.measure, constraintLogMeasure[0]));
            }
        }
    } catch (IOException e) {
        logger.error("CSV aggregated measures serialization failed", e);
    }
}

/**
 * Builds one CSV row with the descriptive statistics of a single measure
 * (deduplicates the two identical row-building branches of the original code).
 *
 * @param constraintName label of the constraint (or "MODEL")
 * @param measureName    name of the quality measure
 * @param stats          descriptive statistics of that measure over the traces
 * @return the CSV row values, in header order
 */
private static String[] traceStatsCsvRow(String constraintName, String measureName, SummaryStatistics stats) {
    return new String[]{
            constraintName,
            measureName,
            String.valueOf(stats.getMean()),
            String.valueOf(stats.getGeometricMean()),
            String.valueOf(stats.getVariance()),
            String.valueOf(stats.getPopulationVariance()),
            String.valueOf(stats.getStandardDeviation()),
            String.valueOf(stats.getMax()),
            String.valueOf(stats.getMin())
    };
}
/**
 * Export to CSV format the measures at the level of the whole log.
 * <p>
 * the columns are:
 * constraint; one column per log measure (or only the single selected measure)
 *
 * @param megaMatrix matrix holding the computed log measures
 * @param outputAggregatedMeasuresFile CSV output file
 * @param measurementsParams measurement command-line parameters (selected measure)
 * @param encodeOutputTasks if true, the tasks are encoded, decoded otherwise
 * @param alphabet alphabet to decode the tasks
 */
public void exportLogMeasuresToCSV(MegaMatrixMonster megaMatrix, File outputAggregatedMeasuresFile, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
    logger.debug("CSV log measures...");
    float[][] neuConstraintsLogMeasure = megaMatrix.getLogMeasuresMatrix();
    List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
    // header row: either all the known measures or just the single requested one
    String[] header;
    if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
        header = ArrayUtils.addAll(new String[]{"Constraint"}, Measures.MEASURE_NAMES);
    } else {
        header = new String[]{"Constraint", measurementsParams.measure};
    }
    // Fix: try-with-resources closes the writer even when a record fails to be
    // written (the original code skipped fw.close() on exception)
    try (FileWriter fw = new FileWriter(outputAggregatedMeasuresFile);
         CSVPrinter printer = new CSVPrinter(fw, CSVFormat.DEFAULT.withHeader(header).withDelimiter(';'))) {
        for (int constraint = 0; constraint < neuConstraintsLogMeasure.length; constraint++) {
            // the last "constraint" entry stands for the whole model
            String constraintName;
            if (constraint == neuConstraintsLogMeasure.length - 1) {
                constraintName = "MODEL";
            } else if (encodeOutputTasks) {
                constraintName = automata.get(constraint).toString();
            } else {
                constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
            }
            // generics added: the original used a raw LinkedList
            List<String> row = new ArrayList<>();
            row.add(constraintName);
            if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
                for (int measureIndex = 0; measureIndex < megaMatrix.getMeasureNames().length; measureIndex++) {
                    row.add(String.valueOf(neuConstraintsLogMeasure[constraint][measureIndex]));
                }
            } else {
                row.add(String.valueOf(neuConstraintsLogMeasure[constraint][0]));
            }
            printer.printRecord(row);
        }
    } catch (IOException e) {
        logger.error("CSV log measures serialization failed", e);
    }
}
/**
* Builds the json structure for a given constraint
*/
private JsonElement tracesMeasuresStatisticsJsonBuilder(MegaMatrixMonster megaMatrix, SummaryStatistics[] constraintLogMeasure, JanusMeasurementsCmdParameters measurementsParams) {
    JsonObject constraintJson = new JsonObject();
    if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
        // one stats object per known measure
        for (int measureIndex = 0; measureIndex < megaMatrix.getMeasureNames().length; measureIndex++) {
            constraintJson.add(megaMatrix.getMeasureName(measureIndex), summaryStatisticsToJson(constraintLogMeasure[measureIndex]));
        }
    } else {
        // only the single requested measure
        constraintJson.add(measurementsParams.measure, summaryStatisticsToJson(constraintLogMeasure[0]));
    }
    return constraintJson;
}

/**
 * Serializes the descriptive statistics of one measure into a JSON object
 * (deduplicates the two identical branches of the original code).
 *
 * @param stats descriptive statistics of a measure over the traces
 * @return JSON object with the statistic values, in the original property order
 */
private static JsonObject summaryStatisticsToJson(SummaryStatistics stats) {
    JsonObject statsJson = new JsonObject();
    statsJson.addProperty("Mean", stats.getMean());
    statsJson.addProperty("Geometric Mean", stats.getGeometricMean());
    statsJson.addProperty("Variance", stats.getVariance());
    statsJson.addProperty("Population variance", stats.getPopulationVariance());
    statsJson.addProperty("Standard Deviation", stats.getStandardDeviation());
    statsJson.addProperty("Max", stats.getMax());
    statsJson.addProperty("Min", stats.getMin());
    return statsJson;
}
/**
 * Builds the json structure holding, for a given trace, the measures of every constraint (plus the whole model)
 */
private JsonElement tracesMeasuresJsonBuilder(MegaMatrixMonster megaMatrix, int traceIndex, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
JsonObject traceJson = new JsonObject();
int constraintsnum = megaMatrix.getConstraintsNumber();
List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
// for each trace
for (int constraint = 0; constraint < constraintsnum; constraint++) {
JsonObject constraintJson = new JsonObject();
// Constraint name
String constraintName;
if (constraint == constraintsnum - 1) {
constraintName = "MODEL";
} else {
if (encodeOutputTasks) {
constraintName = automata.get(constraint).toString();
} else {
constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
}
}
// trace Measures
if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
for (int measureIndex = 0; measureIndex < Measures.MEASURE_NUM; measureIndex++) {
constraintJson.addProperty(Measures.MEASURE_NAMES[measureIndex], megaMatrix.getSpecificMeasure(traceIndex, constraint, measureIndex));
}
} else {
constraintJson.addProperty(measurementsParams.measure, megaMatrix.getTraceMeasuresMatrix()[traceIndex][constraint][0]);
}
traceJson.add(constraintName, constraintJson);
}
return traceJson;
}
/**
* Builds the json structure for a given constraint
*/
private JsonElement logMeasuresJsonBuilder(MegaMatrixMonster megaMatrix, float[] constraintLogMeasure, JanusMeasurementsCmdParameters measurementsParams) {
JsonObject constraintJson = new JsonObject();
if (measurementsParams.measure.equals(measurementsParams.getDefaultMeasure())) {
for (int measureIndex = 0; measureIndex < megaMatrix.getMeasureNames().length; measureIndex++) {
constraintJson.addProperty(megaMatrix.getMeasureName(measureIndex), constraintLogMeasure[measureIndex]);
}
} else {
constraintJson.addProperty(measurementsParams.measure, constraintLogMeasure[0]);
}
return constraintJson;
}
/**
* write the Json file with the Traces Measures Statistics
*
* @param megaMatrix
* @param outputFile
* @param measurementsParams
* @param encodeOutputTasks
* @param alphabet
*/
public void exportTracesMeasuresStatisticsToJson(MegaMatrixMonster megaMatrix, File outputFile, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
logger.debug("JSON aggregated measures...");
Gson gson = new GsonBuilder().setPrettyPrinting().create();
try {
FileWriter fw = new FileWriter(outputFile);
JsonObject jsonOutput = new JsonObject();
List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
// \/ \/ \/ LOG RESULTS
SummaryStatistics[][] constraintLogMeasure = megaMatrix.getTraceMeasuresDescriptiveStatistics();
String constraintName;
for (int constraint = 0; constraint < constraintLogMeasure.length; constraint++) {
if (constraint == constraintLogMeasure.length - 1) {
constraintName = "MODEL";
} else {
if (encodeOutputTasks) {
constraintName = automata.get(constraint).toString();
} else {
constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
}
}
jsonOutput.add(
constraintName,
tracesMeasuresStatisticsJsonBuilder(megaMatrix, constraintLogMeasure[constraint], measurementsParams)
);
}
gson.toJson(jsonOutput, fw);
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
logger.debug("JSON encoded aggregated measures...DONE!");
}
/**
* write the jon file with the aggregated measures
*
* @param megaMatrix
* @param outputFile
* @param measurementsParams
* @param encodeOutputTasks
* @param alphabet
*/
public void exportLogMeasuresToJson(MegaMatrixMonster megaMatrix, File outputFile, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
logger.debug("JSON log measures...");
Gson gson = new GsonBuilder().setPrettyPrinting().create();
try {
FileWriter fw = new FileWriter(outputFile);
JsonObject jsonOutput = new JsonObject();
List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
// \/ \/ \/ LOG RESULTS
float[][] neuConstraintsLogMeasure = megaMatrix.getLogMeasuresMatrix();
String constraintName;
for (int constraint = 0; constraint < neuConstraintsLogMeasure.length; constraint++) {
if (constraint == neuConstraintsLogMeasure.length - 1) {
constraintName = "MODEL";
} else {
if (encodeOutputTasks) {
constraintName = automata.get(constraint).toString();
} else {
constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
}
}
jsonOutput.add(
constraintName,
logMeasuresJsonBuilder(megaMatrix, neuConstraintsLogMeasure[constraint], measurementsParams)
);
}
gson.toJson(jsonOutput, fw);
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
logger.debug("JSON encoded aggregated measures...DONE!");
}
/**
* Serialize the events evaluations into a Json file to have a readable result
*
* @param megaMatrix
* @param outputFile
* @param encodeOutputTasks
* @param alphabet
*/
public void exportEventsEvaluationToJson(MegaMatrixMonster megaMatrix, File outputFile, boolean encodeOutputTasks, TaskCharArchive alphabet) {
logger.debug("JSON readable serialization...");
Gson gson = new GsonBuilder().setPrettyPrinting().create();
try {
FileWriter fw = new FileWriter(outputFile);
JsonObject jsonOutput = new JsonObject();
byte[][][] matrix = megaMatrix.getEventsEvaluationMatrix();
Iterator<LogTraceParser> it = megaMatrix.getLog().traceIterator();
List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
// for the entire log
for (int trace = 0; trace < matrix.length; trace++) {
JsonObject traceJson = new JsonObject();
LogTraceParser tr = it.next();
tr.init();
String traceString;
if (encodeOutputTasks) {
traceString = tr.encodeTrace();
} else {
traceString = tr.printStringTrace();
}
// for each trace
for (int constraint = 0; constraint < matrix[trace].length; constraint++) {
tr.init();
// contraint name
String constraintName;
if (constraint == matrix[trace].length - 1) {
constraintName = "MODEL";
} else {
if (encodeOutputTasks) {
constraintName = automata.get(constraint).toString();
} else {
constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
}
}
// events evaluation
JsonArray eventsJson = new JsonArray();
for (byte e : matrix[trace][constraint]) {
eventsJson.add(Integer.valueOf(e));
}
traceJson.add(constraintName, eventsJson);
}
jsonOutput.add(traceString, traceJson);
}
gson.toJson(jsonOutput, fw);
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
logger.debug("JSON readable serialization...DONE!");
}
/**
* Serialize the 3D matrix into a Json file to have a readable result
*
* @param megaMatrix
* @param outputFile
* @param encodeOutputTasks
* @param alphabet
*/
public void exportEventsEvaluationLiteToJson(MegaMatrixMonster megaMatrix, File outputFile, boolean encodeOutputTasks, TaskCharArchive alphabet) {
logger.debug("JSON readable serialization...");
Gson gson = new GsonBuilder().setPrettyPrinting().create();
try {
FileWriter fw = new FileWriter(outputFile);
JsonObject jsonOutput = new JsonObject();
int[][][] matrix = megaMatrix.getEventsEvaluationMatrixLite();
Iterator<LogTraceParser> it = megaMatrix.getLog().traceIterator();
List<SeparatedAutomatonOfflineRunner> automata = (List) megaMatrix.getAutomata();
// for the entire log
for (int trace = 0; trace < matrix.length; trace++) {
JsonObject traceJson = new JsonObject();
LogTraceParser tr = it.next();
tr.init();
String traceString;
if (encodeOutputTasks) {
traceString = tr.encodeTrace();
} else {
traceString = tr.printStringTrace();
}
// for each trace
for (int constraint = 0; constraint < matrix[trace].length; constraint++) {
tr.init();
// for each constraint
String constraintName;
if (constraint == matrix[trace].length - 1) {
constraintName = "MODEL";
} else {
if (encodeOutputTasks) {
constraintName = automata.get(constraint).toString();
} else {
constraintName = automata.get(constraint).toStringDecoded(alphabet.getTranslationMapById());
}
}
JsonObject frequenciesJson = new JsonObject();
frequenciesJson.addProperty("N(A)", matrix[trace][constraint][0]);
frequenciesJson.addProperty("N(T)", matrix[trace][constraint][1]);
frequenciesJson.addProperty("N(¬A)", matrix[trace][constraint][2]);
frequenciesJson.addProperty("N(¬T)", matrix[trace][constraint][3]);
frequenciesJson.addProperty("N(¬A¬T)", matrix[trace][constraint][4]);
frequenciesJson.addProperty("N(¬AT)", matrix[trace][constraint][5]);
frequenciesJson.addProperty("N(A¬T)", matrix[trace][constraint][6]);
frequenciesJson.addProperty("N(AT)", matrix[trace][constraint][7]);
frequenciesJson.addProperty("Length", matrix[trace][constraint][8]);
traceJson.add(constraintName, frequenciesJson);
}
jsonOutput.add(traceString, traceJson);
}
gson.toJson(jsonOutput, fw);
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
logger.debug("JSON readable serialization...DONE!");
}
/**
* Serialize the events evaluations into a Json file to have a readable result
*
* @param megaMatrix
* @param outputFile
* @param measurementsParams
* @param encodeOutputTasks
* @param alphabet
*/
public void exportTracesMeasuresToJson(MegaMatrixMonster megaMatrix, File outputFile, JanusMeasurementsCmdParameters measurementsParams, boolean encodeOutputTasks, TaskCharArchive alphabet) {
logger.debug("JSON trace measures...");
Gson gson = new GsonBuilder().setPrettyPrinting().create();
try {
FileWriter fw = new FileWriter(outputFile);
JsonObject jsonOutput = new JsonObject();
Iterator<LogTraceParser> it = megaMatrix.getLog().traceIterator();
// for the entire log
for (int trace = 0; trace < megaMatrix.getLog().wholeLength(); trace++) {
LogTraceParser tr = it.next();
tr.init();
String traceString;
if (encodeOutputTasks) {
traceString = tr.encodeTrace();
} else {
traceString = tr.printStringTrace();
}
jsonOutput.add(
traceString,
tracesMeasuresJsonBuilder(megaMatrix, trace, measurementsParams, encodeOutputTasks, alphabet)
);
}
gson.toJson(jsonOutput, fw);
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
logger.debug("JSON readable serialization...DONE!");
}
}
| 43,379 | 44.471698 | 222 | java |
Janus | Janus-master/src/minerful/reactive/io/JanusVariantOutputManagementLauncher.java | package minerful.reactive.io;
import com.google.common.collect.Ordering;
import com.google.common.collect.TreeMultimap;
import minerful.MinerFulOutputManagementLauncher;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.params.SystemCmdParameters;
import minerful.reactive.params.JanusVariantCmdParameters;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
/**
* Class to handle the output of Janus
*/
public class JanusVariantOutputManagementLauncher extends MinerFulOutputManagementLauncher {

    /**
     * Natural-language templates of the supported Declare constraints.
     * The placeholders [$1] and [$2] are replaced with the constraint tasks.
     */
    public static final Map<String, String> DESCRIPTION = new HashMap<>();

    // static initializer instead of double-brace initialization: avoids the
    // anonymous HashMap subclass (and its hidden enclosing reference) anti-pattern
    static {
        DESCRIPTION.put("RespondedExistence", "if [$1] occurs, also [$2] occurs. ");
        DESCRIPTION.put("CoExistence", "[$1] and [$2] co-occur. ");
        DESCRIPTION.put("Succession", "[$1] is followed by [$2] and [$2] is preceded by [$1]. ");
        DESCRIPTION.put("Precedence", "if [$2] occurs, [$1] occurred before it. ");
        DESCRIPTION.put("Response", "if [$1] occurs, [$2] will occur afterwards. ");
        DESCRIPTION.put("AlternateSuccession", "[$1] is followed by [$2] and [$2] is preceded by [$1], without any other occurrence of [$1] and [$2] in between. ");
        DESCRIPTION.put("AlternatePrecedence", "if [$2] occurs, [$1] occurred before it without any other occurrence of [$2] in between. ");
        DESCRIPTION.put("AlternateResponse", "if [$1] occurs, [$2] will occur afterwards without any other occurrence of [$1] in between. ");
        DESCRIPTION.put("ChainSuccession", "[$1] is immediately followed by [$2] and [$2] is immediately preceded by [$1]. ");
        DESCRIPTION.put("ChainPrecedence", "if [$2] occurs, [$1] occurred immediately before it. ");
        DESCRIPTION.put("ChainResponse", "if [$1] occurs, [$2] occurs immediately afterwards. ");
        DESCRIPTION.put("NotCoExistence", "[$1] and [$2] do not occur in together in the same process instance. ");
        DESCRIPTION.put("NotSuccession", "[$1] is not followed by [$2] and [$2] is not preceded by [$1]. ");
        DESCRIPTION.put("NotChainSuccession", "[$1] is not immediately followed by [$2] and [$2] is not immediately preceded by [$1]. ");
        DESCRIPTION.put("Participation", "[$1] occurs in a process instance. ");
        DESCRIPTION.put("AtMostOne", "[$1] may occur at most one time in a process instance. ");
        DESCRIPTION.put("End", "the process ends with [$1]. ");
        DESCRIPTION.put("Init", "the process starts with [$1]. ");
    }

    /**
     * Reads the terminal input parameters and launches the proper output functions.
     *
     * @param variantResults             p-value of each constraint resulting from the variant analysis
     * @param varParams                  variant-analysis command-line parameters
     * @param janusViewParams            output/printing options (may be null)
     * @param systemParams               system command-line parameters
     * @param alphabet                   task alphabet used to decode constraint names
     * @param measurementsSpecification1 constraint measures of variant 1
     * @param measurementsSpecification2 constraint measures of variant 2
     */
    public void manageVariantOutput(Map<String, Float> variantResults,
                                    JanusVariantCmdParameters varParams,
                                    JanusPrintParameters janusViewParams,
                                    SystemCmdParameters systemParams,
                                    TaskCharArchive alphabet,
                                    Map<String, Float> measurementsSpecification1,
                                    Map<String, Float> measurementsSpecification2) {
        File outputFile = null;

        // ************* CSV
        if (varParams.outputCvsFile != null) {
            outputFile = retrieveFile(varParams.outputCvsFile);
            logger.info("Saving variant analysis result as CSV in " + outputFile + "...");
            double before = System.currentTimeMillis();

            exportVariantResultsToCSV(variantResults, outputFile, varParams, alphabet, measurementsSpecification1, measurementsSpecification2);

            double after = System.currentTimeMillis();
            logger.info("Total CSV serialization time: " + (after - before));
        }

        if (janusViewParams != null && !janusViewParams.suppressResultsPrintOut) {
            printVariantResultsToScreen(variantResults, varParams, alphabet, measurementsSpecification1, measurementsSpecification2);
        }

        // ************* JSON
        if (varParams.outputJsonFile != null) {
            outputFile = retrieveFile(varParams.outputJsonFile);
            logger.info("Saving variant analysis result as JSON in " + outputFile + "...");
            double before = System.currentTimeMillis();

            // TODO
            logger.info("JSON output yet not implemented");

            double after = System.currentTimeMillis();
            logger.info("Total JSON serialization time: " + (after - before));
        }
    }

    /**
     * Prints on screen the constraints whose p-value is below the user threshold,
     * together with their measures in the two variants.
     */
    private void printVariantResultsToScreen(Map<String, Float> variantResults, JanusVariantCmdParameters varParams, TaskCharArchive alphabet, Map<String, Float> measurementsSpecification1, Map<String, Float> measurementsSpecification2) {
        // header row
        System.out.println("--------------------");
        System.out.println("relevant constraints differences");
        System.out.println("CONSTRAINT : P_VALUE");
        Map<Character, TaskChar> translationMap = alphabet.getTranslationMapById();
        for (String constraint : variantResults.keySet()) {
            if (variantResults.get(constraint) <= varParams.pValue) {
                System.out.println(decodeConstraint(constraint, translationMap) + " : " + variantResults.get(constraint).toString() + " [Var1: " + measurementsSpecification1.get(constraint).toString() + " | Var2: " + measurementsSpecification2.get(constraint).toString() + "]");
            }
        }
    }

    /**
     * Exports the variant analysis to two files: a detailed CSV with one row per
     * constraint (sorted by decreasing measure difference) and a "best-of" text
     * file with the natural-language description of the top results.
     */
    private void exportVariantResultsToCSV(Map<String, Float> variantResults, File outputFile, JanusVariantCmdParameters varParams, TaskCharArchive alphabet, Map<String, Float> measurementsSpecification1, Map<String, Float> measurementsSpecification2) {
        String[] headerDetailed = {"Constraint", "p_value", "Measure_VAR1", "Measure_VAR2", "ABS-Difference", "Natural_Language_Description"};
        String fileNameBestOf = outputFile.getAbsolutePath().substring(0, outputFile.getAbsolutePath().indexOf(".csv")).concat("[Best-" + varParams.bestNresults + "].txt");
        String[] headerBestOf = {"RESULTS"};

        // try-with-resources: writers and printers are closed/flushed even on failure
        try (FileWriter fwDetailed = new FileWriter(outputFile);
             CSVPrinter printerDetailed = new CSVPrinter(fwDetailed, CSVFormat.DEFAULT.withHeader(headerDetailed).withDelimiter(';'));
             FileWriter fwBestOf = new FileWriter(fileNameBestOf);
             CSVPrinter printerBestOf = new CSVPrinter(fwBestOf, CSVFormat.DEFAULT.withHeader(headerBestOf).withDelimiter(';'))) {

            // Sort results by difference in decreasing order
            TreeMultimap<Float, String[]> sortedDiffResults = TreeMultimap.create(Ordering.natural().reverse(), Ordering.usingToString());
            Map<Character, TaskChar> translationMap = alphabet.getTranslationMapById();
            for (String constraint : variantResults.keySet()) {
                // decode constraint
                String decodedConstraint = decodeConstraint(constraint, translationMap);
                // Row builder
                float difference = Math.abs(measurementsSpecification1.get(constraint) - measurementsSpecification2.get(constraint));
                sortedDiffResults.put(difference, new String[]{
                        decodedConstraint,
                        variantResults.get(constraint).toString(),
                        String.format("%.3f", measurementsSpecification1.get(constraint)),
                        String.format("%.3f", measurementsSpecification2.get(constraint)),
                        String.format("%.3f", difference),
                        getNaturalLanguageDescription(decodedConstraint, varParams.measure, measurementsSpecification1.get(constraint), measurementsSpecification2.get(constraint), difference, varParams)}
                );
            }

            // NOTE(review): the best-of cut-off is checked only after each group of
            // rows sharing the same difference, so ties may emit a few extra rows —
            // behavior kept as in the original implementation.
            int counter = varParams.bestNresults;
            boolean continueBest = true;
            for (Float key : sortedDiffResults.keySet()) {
                for (String[] line : sortedDiffResults.get(key)) {
                    printerDetailed.printRecord(line);
                    if (continueBest) printerBestOf.printRecord(line[line.length - 1]); //print only natural language
                    counter--; // first N results
                }
                if (counter < 0) continueBest = false;
//                counter--; // First N distinct results
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Builds a natural-language sentence describing the difference of a constraint
     * between the two variants, based on the templates in {@link #DESCRIPTION}.
     *
     * @param constraint  decoded constraint, e.g. "Response(a,b)"
     * @param measure     name of the measure being compared
     * @param var1measure measure value in variant 1 (may be NaN)
     * @param var2measure measure value in variant 2 (may be NaN)
     * @param difference  absolute difference of the two measures (NaN if either is NaN)
     * @param varParams   variant-analysis parameters (measure threshold)
     * @return the natural-language description
     */
    private String getNaturalLanguageDescription(String constraint, String measure, float var1measure, float var2measure, float difference, JanusVariantCmdParameters varParams) {
        String template = constraint.split("\\(")[0];
        String result;
        if (Float.isNaN(difference)) {
            // one of the two variants does not contain the constraint at all
            if (Float.isNaN(var1measure)) {
                if (var2measure < varParams.measureThreshold) result = "It may happen only in variant 2 that ";
                else result = "It happens only in variant 2 that ";
            } else {
                if (var1measure < varParams.measureThreshold) result = "It may happen only in variant 1 that ";
                else result = "It happens only in variant 1 that ";
            }
        } else {
            String greaterVariance = (var1measure > var2measure) ? "1" : "2";
            String smallerVariance = (var1measure > var2measure) ? "2" : "1";
            // fixed: missing space before the variant number ("variant2" -> "variant 2")
            result = "In variant " + greaterVariance + " it is " + String.format("%.1f", difference * 100) + "% more likely than variant " + smallerVariance + " that ";
        }
        if (DESCRIPTION.get(template) == null) {
            logger.error("[Constraint without natural language description: " + template + "]");
            result += "[Constraint without natural language description: " + template + "]";
        } else result += DESCRIPTION.get(template);
        if (!constraint.contains(",")) {
            // unary constraint: only the $1 placeholder is present
            String task = constraint.split("\\(")[1].replace(")", "");
            result = result.replace("$1", task);
        } else {
            String task1 = constraint.split("\\(")[1].replace(")", "").split(",")[0];
            String task2 = constraint.split("\\(")[1].replace(")", "").split(",")[1];
            result = result.replace("$1", task1).replace("$2", task2);
        }
        return result;
    }

    /**
     * Decodes an encoded constraint (e.g. "Response(a,b)") by translating each
     * single-character task id through the given translation map.
     *
     * @param encodedConstraint constraint with encoded task characters
     * @param translationMap    map from task character to the original task
     * @return the constraint with decoded task names
     */
    private String decodeConstraint(String encodedConstraint, Map<Character, TaskChar> translationMap) {
        StringBuilder resultBuilder = new StringBuilder();
        String constraint = encodedConstraint.substring(0, encodedConstraint.indexOf("("));
        resultBuilder.append(constraint);
        String[] encodedVariables = encodedConstraint.substring(encodedConstraint.indexOf("(")).replace("(", "").replace(")", "").split(",");
        resultBuilder.append("(");
        String decodedActivator = translationMap.get(encodedVariables[0].charAt(0)).toString();
        resultBuilder.append(decodedActivator);
        if (encodedVariables.length > 1) { //constraints with 2 variables
            resultBuilder.append(",");
            String decodedTarget = translationMap.get(encodedVariables[1].charAt(0)).toString();
            resultBuilder.append(decodedTarget);
        }
        resultBuilder.append(")");
        return resultBuilder.toString();
    }
}
| 11,580 | 53.886256 | 278 | java |
Janus | Janus-master/src/minerful/reactive/measurements/Measures.java | package minerful.reactive.measurements;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
/**
* Class containing the measurement functions.
* <p>
* Most of the measures are taken from :
* Geng, Liqiang, and Howard J. Hamilton. ‘Interestingness Measures for Data Mining: A Survey’. ACM Computing Surveys 38, no. 3 (30 September 2006): 9-es. https://doi.org/10.1145/1132960.1132963.
*/
public class Measures {
    // Shared scratch buffer reused by getTraceProbabilities(): performance optimization,
    // since creating a new temporary array for each measurement made the GC go in overhead
    // with big datasets.
    // NOTE(review): as a mutable static field this makes the computation non-thread-safe —
    // confirm callers are single-threaded before parallelizing.
    static float[] currentTraceProbability = new float[9];

    // TODO improve this hard-code shame
    // Names of all supported measures; the array position is the measure index
    // used throughout this class (see getMeasureName/getMeasureIndex).
    public static String[] MEASURE_NAMES = {
            "Support", // 0
            "Confidence", // 1
            "Recall", // 2
            "Lovinger", // 3
            "Specificity", // 4
            "Accuracy", // 5
            "Lift", // 6
            "Leverage", // 7
            "Compliance", // 8
            "Odds Ratio", // 9
            "Gini Index", // 10
            "Certainty factor", // 11
            "Coverage", // 12
            "Prevalence", // 13
            "Added Value", // 14
            "Relative Risk", // 15
            "Jaccard", // 16
            "Ylue Q", // 17
            "Ylue Y", // 18
            "Klosgen", // 19
            "Conviction", // 20
            "Interestingness Weighting Dependency", // 21
            "Collective Strength", // 22
            "Laplace Correction", // 23
            "J Measure", // 24
            "One-way Support", // 25
            "Two-way Support", // 26
            "Two-way Support Variation", // 27
            "Linear Correlation Coefficient", // 28
            "Piatetsky-Shapiro", // 29
            "Cosine", // 30
            "Information Gain", // 31
            "Sebag-Schoenauer", // 32
            "Least Contradiction", // 33
            "Odd Multiplier", // 34
            "Example and Counterexample Rate", // 35
            "Zhang" // 36
    };
    // TODO improve this hard-code shame
    // Total number of supported measures.
    public static int MEASURE_NUM = MEASURE_NAMES.length;
    /**
     * Retrieves the name of a measure given its index.
     *
     * @param measureIndex index of the measure in {@code MEASURE_NAMES}
     * @return the measure name at that index
     */
    public static String getMeasureName(int measureIndex) {
        return MEASURE_NAMES[measureIndex];
    }
/**
* Retrieve the index of a measure given its name.
* WARNING case-sensitive
*
* @param measureName
* @return
*/
public static int getMeasureIndex(String measureName) {
// TODO make a stronger search not case-sensitive, ignoring spaces and symbols like bars
return ArrayUtils.indexOf(MEASURE_NAMES, measureName);
}
public static float getTraceMeasure(byte[] reactiveConstraintEvaluation, int measureIndex, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
float[] traceProbabilities = getTraceProbabilities(reactiveConstraintEvaluation);
float result = getTraceMeasure(traceProbabilities, measureIndex);
// according to the input setting, substitute the measure value if it is NaN
if (nanTraceSubstituteFlag && Float.isNaN(result))
return (float) nanTraceSubstituteValue;
return result;
}
public static float getTraceMeasure(int[] traceEvaluation, int measureIndex, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
float[] traceProbabilities = getTraceProbabilities(traceEvaluation);
float result = getTraceMeasure(traceProbabilities, measureIndex);
// according to the input setting, substitute the measure value if it is NaN
if (nanTraceSubstituteFlag && Float.isNaN(result))
return (float) nanTraceSubstituteValue;
return result;
}
// /**
// * LEGACY Generic method to return the trace measure for a specific measure using the single events evaluation.
// * <p>
// * The usage of this function is intended for batch measurement involving all measures, to avoid to call them one by one.
// *
// * @param reactiveConstraintEvaluation
// * @param measureIndex
// * @return
// */
// @Deprecated
// public static double getTraceMeasure(byte[] reactiveConstraintEvaluation, int measureIndex, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
// // TODO improve this hard-code shame
// double result = 0;
// switch (measureIndex) {
// case 0:
//// support
// result = getTraceSupport(reactiveConstraintEvaluation);
// break;
// case 1:
//// confidence
// result = getTraceConfidence(reactiveConstraintEvaluation);
// break;
// case 2:
//// recall
// result = getTraceRecall(reactiveConstraintEvaluation);
// break;
// case 3:
//// Lovinger
// result = getTraceLovinger(reactiveConstraintEvaluation);
// break;
// case 4:
//// Specificity
// result = getTraceSpecificity(reactiveConstraintEvaluation);
// break;
// case 5:
//// Accuracy
// result = getTraceAccuracy(reactiveConstraintEvaluation);
// break;
// case 6:
//// Accuracy
// result = getTraceLift(reactiveConstraintEvaluation);
// break;
// case 7:
//// Leverage
// result = getTraceLeverage(reactiveConstraintEvaluation);
// break;
// case 8:
//// Compliance
// result = getTraceCompliance(reactiveConstraintEvaluation);
// break;
// case 9:
//// Odds Ratio
// result = getTraceOddsRatio(reactiveConstraintEvaluation);
// break;
// case 10:
//// Gini Index
// result = getTraceGiniIndex(reactiveConstraintEvaluation);
// break;
// case 11:
//// Certainty factor
// result = getTraceCertaintyFactor(reactiveConstraintEvaluation);
// break;
// case 12:
//// Coverage
// result = getTraceCoverage(reactiveConstraintEvaluation);
// break;
// case 13:
//// Prevalence
// result = getTracePrevalence(reactiveConstraintEvaluation);
// break;
// case 14:
//// Added Value
// result = getTraceAddedValue(reactiveConstraintEvaluation);
// break;
// case 15:
//// Relative Risk
// result = getTraceRelativeRisk(reactiveConstraintEvaluation);
// break;
// case 16:
//// Jaccard
// result = getTraceJaccard(reactiveConstraintEvaluation);
// break;
// case 17:
//// Ylue Q
// result = getTraceYlueQ(reactiveConstraintEvaluation);
// break;
// case 18:
//// Ylue Y
// result = getTraceYlueY(reactiveConstraintEvaluation);
// break;
// case 19:
//// Klosgen
// result = getTraceKlosgen(reactiveConstraintEvaluation);
// break;
// case 20:
//// Conviction
// result = getTraceConviction(reactiveConstraintEvaluation);
// break;
// case 21:
//// Interestingness Weighting Dependency
// result = getTraceInterestingnessWeightingDependency(reactiveConstraintEvaluation);
// break;
// case 22:
//// Collective Strength
// result = getTraceCollectiveStrength(reactiveConstraintEvaluation);
// break;
// case 23:
//// Laplace Correction
// result = getTraceLaplaceCorrection(reactiveConstraintEvaluation);
// break;
// case 24:
//// J Measure
// result = getTraceJMeasure(reactiveConstraintEvaluation);
// break;
// case 25:
//// One-way Support
// result = getTraceOneWaySupport(reactiveConstraintEvaluation);
// break;
// case 26:
//// Two-way Support
// result = getTraceTwoWaySupport(reactiveConstraintEvaluation);
// break;
// case 27:
//// Two-way Support Variation
// result = getTraceTwoWaySupportVariation(reactiveConstraintEvaluation);
// break;
// case 28:
//// Linear Correlation Coefficient
// result = getTraceLinearCorrelationCoefficient(reactiveConstraintEvaluation);
// break;
// case 29:
//// Piatetsky-Shapiro
// result = getTracePiatetskyShapiro(reactiveConstraintEvaluation);
// break;
// case 30:
//// Cosine
// result = getTraceCosine(reactiveConstraintEvaluation);
// break;
// case 31:
//// Information Gain
// result = getTraceInformationGain(reactiveConstraintEvaluation);
// break;
// case 32:
//// Sebag-Schoenauer
// result = getTraceSebagSchoenauer(reactiveConstraintEvaluation);
// break;
// case 33:
//// Least Contradiction
// result = getTraceLeastContradiction(reactiveConstraintEvaluation);
// break;
// case 34:
//// Odd Multiplier
// result = getTraceOddMultiplier(reactiveConstraintEvaluation);
// break;
// case 35:
//// Example and Counterexample Rate
// result = getTraceExampleCounterexampleRate(reactiveConstraintEvaluation);
// break;
// case 36:
//// Zhang
// result = getTraceZhang(reactiveConstraintEvaluation);
// break;
// }
//
// // according to the input setting, substitute the measure value if it is NaN
// if (nanTraceSubstituteFlag && Double.isNaN(result))
// return nanTraceSubstituteValue;
//
// return result;
//
// }
    /**
     * Generic method to return the log measure for a specific measure using the already computed log probabilities.
     * <p>
     * The usage of this function is intended for batch measurement involving all measures, to avoid to call them one by one.
     *
     * @param logProbabilities already computed log-level probabilities (same layout as the trace probabilities)
     * @param measureIndex     index of the desired measure (see {@code MEASURE_NAMES})
     * @return the measure value; 0 when the index is unknown
     */
    public static float getLogMeasure(float[] logProbabilities, int measureIndex) {
        /* TODO traces and log measures shares the same formulae,
        you just have to compute the trace/log probabilities in different way.
        Refactor functions to reflect this */
        return getTraceMeasure(logProbabilities, measureIndex);
    }
/**
* Generic method to return the trace measure for a specific measure using the already computed trace probabilities.
* <p>
* The usage of this function is intended for batch measurement involving all measures, to avoid to call them one by one.
*
* @param traceProbabilities
* @param measureIndex
* @return
*/
public static float getTraceMeasure(float[] traceProbabilities, int measureIndex) {
// TODO improve this hard-code shame
float result = 0;
switch (measureIndex) {
case 0:
// support
result = getTraceSupport(traceProbabilities);
break;
case 1:
// confidence
result = getTraceConfidence(traceProbabilities);
break;
case 2:
// recall
result = getTraceRecall(traceProbabilities);
break;
case 3:
// Lovinger
result = getTraceLovinger(traceProbabilities);
break;
case 4:
// Specificity
result = getTraceSpecificity(traceProbabilities);
break;
case 5:
// Accuracy
result = getTraceAccuracy(traceProbabilities);
break;
case 6:
// Accuracy
result = getTraceLift(traceProbabilities);
break;
case 7:
// Leverage
result = getTraceLeverage(traceProbabilities);
break;
case 8:
// Compliance
result = getTraceCompliance(traceProbabilities);
break;
case 9:
// Odds Ratio
result = getTraceOddsRatio(traceProbabilities);
break;
case 10:
// Gini Index
result = getTraceGiniIndex(traceProbabilities);
break;
case 11:
// Certainty factor
result = getTraceCertaintyFactor(traceProbabilities);
break;
case 12:
// Coverage
result = getTraceCoverage(traceProbabilities);
break;
case 13:
// Prevalence
result = getTracePrevalence(traceProbabilities);
break;
case 14:
// Added Value
result = getTraceAddedValue(traceProbabilities);
break;
case 15:
// Relative Risk
result = getTraceRelativeRisk(traceProbabilities);
break;
case 16:
// Jaccard
result = getTraceJaccard(traceProbabilities);
break;
case 17:
// Ylue Q
result = getTraceYlueQ(traceProbabilities);
break;
case 18:
// Ylue Y
result = getTraceYlueY(traceProbabilities);
break;
case 19:
// Klosgen
result = getTraceKlosgen(traceProbabilities);
break;
case 20:
// Conviction
result = getTraceConviction(traceProbabilities);
break;
case 21:
// Interestingness Weighting Dependency
result = getTraceInterestingnessWeightingDependency(traceProbabilities);
break;
case 22:
// Collective Strength
result = getTraceCollectiveStrength(traceProbabilities);
break;
case 23:
// Laplace Correction
result = getTraceLaplaceCorrection(traceProbabilities);
break;
case 24:
// J Measure
result = getTraceJMeasure(traceProbabilities);
break;
case 25:
// One-way Support
result = getTraceOneWaySupport(traceProbabilities);
break;
case 26:
// Two-way Support
result = getTraceTwoWaySupport(traceProbabilities);
break;
case 27:
// Two-way Support Variation
result = getTraceTwoWaySupportVariation(traceProbabilities);
break;
case 28:
// Linear Correlation Coefficient
result = getTraceLinearCorrelationCoefficient(traceProbabilities);
break;
case 29:
// Piatetsky-Shapiro
result = getTracePiatetskyShapiro(traceProbabilities);
break;
case 30:
// Cosine
result = getTraceCosine(traceProbabilities);
break;
case 31:
// Information Gain
result = getTraceInformationGain(traceProbabilities);
break;
case 32:
// Sebag-Schoenauer
result = getTraceSebagSchoenauer(traceProbabilities);
break;
case 33:
// Least Contradiction
result = getTraceLeastContradiction(traceProbabilities);
break;
case 34:
// Odd Multiplier
result = getTraceOddMultiplier(traceProbabilities);
break;
case 35:
// Example and Counterexample Rate
result = getTraceExampleCounterexampleRate(traceProbabilities);
break;
case 36:
// Zhang
result = getTraceZhang(traceProbabilities);
break;
}
return result;
}
/**
* From the events evaluation, retrieve the probabilities of both activator and target (plus their negatives) formula of a reactive constraint.
* * i.e. P(A),P(T),P(¬A),P(¬T)
* and the probabilities of the combinations of activator and target formula of a reactive constraint.
* * i.e. P(¬A¬T),P(A¬T),P(¬AT),P(¬A¬T)
* and the lenght of the trace
*
* @param reactiveConstraintEvaluation
* @return
*/
public static float[] getTraceProbabilities(byte[] reactiveConstraintEvaluation) {
// float[] currentTraceProbability = new float[9];
// if (reactiveConstraintEvaluation.length == 0) return currentTraceProbability;
if (reactiveConstraintEvaluation.length == 0) return new float[9];
currentTraceProbability = new float[9];
// result { 0: activation, 1: target, 2: no activation, 3: no target}
// result {4: 00, 5: 01, , 6: 10, 7:11}
for (byte eval : reactiveConstraintEvaluation) {
currentTraceProbability[0] += eval / 2; // the activator is true if the byte is >1, i.e. 2 or 3
currentTraceProbability[1] += eval % 2; // the target is true if the byte is odd, i,e, 1 or 3
currentTraceProbability[eval + 4]++;
}
float l = reactiveConstraintEvaluation.length;
currentTraceProbability[2] = l - currentTraceProbability[0];
currentTraceProbability[3] = l - currentTraceProbability[1];
currentTraceProbability[0] /= l;
currentTraceProbability[1] /= l;
currentTraceProbability[2] /= l;
currentTraceProbability[3] /= l;
currentTraceProbability[4] /= l;
currentTraceProbability[5] /= l;
currentTraceProbability[6] /= l;
currentTraceProbability[7] /= l;
currentTraceProbability[8] = l;
return currentTraceProbability;
}
/**
* From the trace evaluation, retrieve the probabilities of both activator and target (plus their negatives) formula of a reactive constraint.
* * i.e. P(A),P(T),P(¬A),P(¬T)
* and the probabilities of the combinations of activator and target formula of a reactive constraint.
* * i.e. P(¬A¬T),P(A¬T),P(¬AT),P(¬A¬T)
* and the lenght of the trace
*
* @param traceEvaluation
* @return
*/
public static float[] getTraceProbabilities(int[] traceEvaluation) {
float[] result = new float[9];
if (traceEvaluation.length == 0) return result;
float l = traceEvaluation[8];
result[0] = traceEvaluation[0] / l;
result[1] = traceEvaluation[1] / l;
result[2] = traceEvaluation[2] / l;
result[3] = traceEvaluation[3] / l;
result[4] = traceEvaluation[4] / l;
result[5] = traceEvaluation[5] / l;
result[6] = traceEvaluation[6] / l;
result[7] = traceEvaluation[7] / l;
result[8] = l;
return result;
}
/**
* Retrieve the probabilities of both activator and target (plus their negatives) formula of a reactive constraint.
*
* @param reactiveConstraintEvaluation byte array of {0,1,2,3} encoding the bolean evaluation of both the activator and the target of a reactive constraint
* @return
*/
public static float[] getReactiveProbabilities(byte[] reactiveConstraintEvaluation) {
float[] result = {0, 0, 0, 0}; // result { 0: activation, 1: target, 2: no activation, 3: no target}
if (reactiveConstraintEvaluation.length == 0) return result;
for (byte eval : reactiveConstraintEvaluation) {
result[0] += eval / 2; // the activator is true if the byte is >1, i.e. 2 or 3
result[1] += eval % 2; // the target is true if the byte is odd, i,e, 1 or 3
}
result[2] = reactiveConstraintEvaluation.length - result[0];
result[3] = reactiveConstraintEvaluation.length - result[1];
result[0] /= reactiveConstraintEvaluation.length;
result[1] /= reactiveConstraintEvaluation.length;
result[2] /= reactiveConstraintEvaluation.length;
result[3] /= reactiveConstraintEvaluation.length;
return result;
}
/**
* Retrieve the probabilities of the combinations of activator and target formula of a reactive constraint.
* i.e. P(¬A¬T),P(A¬T),P(¬AT),P(¬A¬T)
*
* @param reactiveConstraintEvaluation byte array of {0,1,2,3} encoding the bolean evaluation of both the activator and the target of a reactive constraint
* @return
*/
public static float[] getReactiveIntersectionsProbabilities(byte[] reactiveConstraintEvaluation) {
float[] result = {0, 0, 0, 0}; // result {0: 00, 1: 01, , 2: 10, 3:11}
if (reactiveConstraintEvaluation.length == 0) return result;
for (byte eval : reactiveConstraintEvaluation) {
result[eval]++;
}
result[0] /= reactiveConstraintEvaluation.length;
result[1] /= reactiveConstraintEvaluation.length;
result[2] /= reactiveConstraintEvaluation.length;
result[3] /= reactiveConstraintEvaluation.length;
return result;
}
/**
* Retrieve the probability of a formula holding true in a trace given its evaluation on the trace.
* BEWARE: this probability is defined for a single formula, not the entire reactive constraint A->B
*
* @param formulaEvaluation Byte array (representing a bit array) of 0s and 1s
* @return
*/
public static float getFormulaProbability(byte[] formulaEvaluation) {
if (formulaEvaluation.length == 0) return 0;
float result = 0;
for (byte eval : formulaEvaluation) {
result += eval;
}
return result / formulaEvaluation.length;
}
/**
* retrieve the support measure of a constraint for a given trace.
* <p>
* The support measure is defined as:
* Supp(A->T) = P(A' intersection T') =
*
* @return
*/
public static float getTraceSupport(byte[] reactiveConstraintEvaluation) {
if (reactiveConstraintEvaluation.length == 0) return 0;
float result = 0;
for (byte eval : reactiveConstraintEvaluation) {
result += eval / 3; // activator and target are both true when the byte is 3
}
return result / reactiveConstraintEvaluation.length;
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// return pAT;
}
/**
* retrieve the support measure of a constraint for a given trace.
* <p>
* The support measure is defined as:
* Supp(A->T) = P(A' intersection T') =
*
* @return
*/
public static float getTraceSupport(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
return pAT;
}
/**
* retrieve the confidence of a constraint for a given trace.
* <p>
* The confidence measure is defined as:
* Conf(A->T) = P(T'|A') = P(T' intersection A') / P(A') = Supp(A'->T')/P(A')
*
* @return
*/
public static float getTraceConfidence(byte[] reactiveConstraintEvaluation) {
byte[] activatorEval = getActivatorEvaluation(reactiveConstraintEvaluation);
float denominator = getFormulaProbability(activatorEval);
// if (denominator == 0) return float.NaN;
return getTraceSupport(reactiveConstraintEvaluation) / denominator;
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// float result= pAT / pA;
//
// if (float.isNaN(result)){
// return 0;
// }
// else {
// return result;
// }
}
/**
* retrieve the confidence of a constraint for a given trace.
* <p>
* The confidence measure is defined as:
* Conf(A->T) = P(T'|A') = P(T' intersection A') / P(A') = Supp(A'->T')/P(A')
*
* @return
*/
public static float getTraceConfidence(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pAT / pA;
return result;
}
/**
* retrieve the recall of a constraint for a given trace.
* <p>
* The recall measure is defined as:
* Recall(A->T) = P(A'|T') = P(T' intersection A') / P(T') = Supp(A'->T')/P(T')
*
* @return
*/
public static float getTraceRecall(byte[] reactiveConstraintEvaluation) {
byte[] targetEval = getTargetEvaluation(reactiveConstraintEvaluation);
float denominator = getFormulaProbability(targetEval);
// if (denominator == 0) return 0;
return getTraceSupport(reactiveConstraintEvaluation) / denominator;
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// float result= pAT / pT;
//
// if (float.isNaN(result)){
// return 0;
// }
// else {
// return result;
// }
}
/**
* retrieve the recall of a constraint for a given trace.
* <p>
* The recall measure is defined as:
* Recall(A->T) = P(A'|T') = P(T' intersection A') / P(T') = Supp(A'->T')/P(T')
*
* @return
*/
public static float getTraceRecall(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pAT / pT;
return result;
}
/**
* Retrieve the Lovinger's Measure of a constraint for a given trace.
* <p>
* The Lovinger's measure is defined as:
* Lov(A->T) = 1 − ((P(A)P(¬T))/P(A¬T)))
*
* @return
*/
public static float getTraceLovinger(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = 1 - ((pA * pnT) / (pAnT));
return result;
}
/**
* Retrieve the Lovinger's Measure of a constraint for a given trace.
* <p>
* The Lovinger's measure is defined as:
* Lov(A->T) = 1 − ((P(A)P(¬T))/P(A¬T)))
*
* @return
*/
public static float getTraceLovinger(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = 1 - ((pA * pnT) / (pAnT));
return result;
}
/**
* Retrieve the Specificity Measure of a constraint for a given trace.
* <p>
* The Specificity measure is defined as:
* Specificity(A->T) = P(¬T'|¬A') = (Conf(¬A'->¬T'))
*
* @return
*/
public static float getTraceSpecificity(byte[] reactiveConstraintEvaluation) {
return getTraceConfidence(getNegativeReactiveConstraintEvaluation(reactiveConstraintEvaluation));
// TODO test the validity of this function with experiment del:e-Response(e,f)
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// float result= pnAnT / pnA;
//
// if (float.isNaN(result)){
// return 0;
// }
// else {
// return result;
// }
}
/**
* Retrieve the Specificity Measure of a constraint for a given trace.
* <p>
* The Specificity measure is defined as:
* Specificity(A->T) = P(¬T'|¬A') = (Conf(¬A'->¬T'))
*
* @return
*/
public static float getTraceSpecificity(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pnAnT / pnA;
return result;
}
/**
* Retrieve the Accuracy Measure of a constraint for a given trace.
* <p>
* The Accuracy measure is defined as:
* Accuracy(A->T) = P(T' intersection A') + P(¬T' intersection ¬A') = (Supp(A'->T') + Supp(¬A'->¬T'))
*
* @return
*/
public static float getTraceAccuracy(byte[] reactiveConstraintEvaluation) {
return getTraceSupport(reactiveConstraintEvaluation) + getTraceSupport(getNegativeReactiveConstraintEvaluation(reactiveConstraintEvaluation));
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// return pAT + pnAnT;
}
/**
* Retrieve the Accuracy Measure of a constraint for a given trace.
* <p>
* The Accuracy measure is defined as:
* Accuracy(A->T) = P(T' intersection A') + P(¬T' intersection ¬A') = (Supp(A'->T') + Supp(¬A'->¬T'))
*
* @return
*/
public static float getTraceAccuracy(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pAT + pnAnT;
return result;
}
/**
* Retrieve the Lift Measure of a constraint for a given trace.
* <p>
* The Lift measure is defined as:
* Specificity(A->T) = P(T'|A') / P(T') = (Conf(A'->T') / P(T'))
*
* @return
*/
public static float getTraceLift(byte[] reactiveConstraintEvaluation) {
byte[] targetEval = getTargetEvaluation(reactiveConstraintEvaluation);
float denominator = getFormulaProbability(targetEval);
// if (denominator == 0) return 0;
return getTraceConfidence(reactiveConstraintEvaluation) / denominator;
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// float result= pAT / (pA * pT);
//
// if (float.isNaN(result)){
// return 0;
// }
// else {
// return result;
// }
}
/**
* Retrieve the Lift Measure of a constraint for a given trace.
* <p>
* The Lift measure is defined as:
* Specificity(A->T) = P(T'|A') / P(T') = (Conf(A'->T') / P(T'))
*
* @return
*/
public static float getTraceLift(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pAT / (pA * pT);
return result;
}
/**
* Retrieve the Leverage Measure of a constraint for a given trace.
* <p>
* The Leverage measure is defined as:
* Specificity(A->T) = P(T'|A') - P(A')P(T') = (Conf(A'->T') - P(A')P(T'))
*
* @return
*/
public static float getTraceLeverage(byte[] reactiveConstraintEvaluation) {
byte[] activatorEval = getActivatorEvaluation(reactiveConstraintEvaluation);
byte[] targetEval = getTargetEvaluation(reactiveConstraintEvaluation);
float pA = getFormulaProbability(activatorEval);
float pT = getFormulaProbability(targetEval);
return getTraceConfidence(reactiveConstraintEvaluation) - (pA * pT);
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// float result= (pAT / pA) - pA * pT;
//
// if (float.isNaN(result)){
// return 0;
// }
// else {
// return result;
// }
}
/**
* Retrieve the Leverage Measure of a constraint for a given trace.
* <p>
* The Leverage measure is defined as:
* Specificity(A->T) = P(T'|A') - P(A')P(T') = (Conf(A'->T') - P(A')P(T'))
*
* @return
*/
public static float getTraceLeverage(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = (pAT / pA) - (pA * pT);
return result;
}
/**
* ORIGINAL MEASURE! Retrieve the Compliance Measure of a constraint for a given trace.
* We developed this measure to emulate the original support intuition,
* i.e., the percentage of the trace which do not conflict with the constraint.
* Thus we count all the points except the active violations (activation true but target false)
* <p>
* The Compliance measure is defined as:
* Compliance(A->T) = 1 - P(A' intersection ¬T')
*
* @return
*/
public static float getTraceCompliance(byte[] reactiveConstraintEvaluation) {
if (reactiveConstraintEvaluation.length == 0) return 0;
float result = 0;
for (byte eval : reactiveConstraintEvaluation) {
if (eval == 2) { // activator true but target false when byte equal to 2
result++;
}
}
return 1 - result / reactiveConstraintEvaluation.length;
// float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
// float pA = p[0];
// float pnA = p[2];
// float pT = p[1];
// float pnT = p[3];
// float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
// float pnAnT = pIntersection[0];
// float pnAT = pIntersection[1];
// float pAnT = pIntersection[2];
// float pAT = pIntersection[3];
//
// return 1 - pAnT;
}
/**
* ORIGINAL MEASURE! Retrieve the Compliance Measure of a constraint for a given trace.
* We developed this measure to emulate the original support intuition,
* i.e., the percentage of the trace which do not conflict with the constraint.
* Thus we count all the points except the active violations (activation true but target false)
* <p>
* The Compliance measure is defined as:
* Compliance(A->T) = 1 - P(A' intersection ¬T')
*
* @return
*/
public static float getTraceCompliance(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = 1 - pAnT;
return result;
}
/**
* Retrieve the Odds Ratio Measure of a constraint for a given trace.
* <p>
* The Odds Ratio measure is defined as:
* OddsRatio(A->T) = ( P(A' intersection T') P(¬A' intersection ¬T') ) / ( P(A' intersection ¬T') P(¬A' intersection T') )
*
* @return
*/
public static float getTraceOddsRatio(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT * pnAnT) / (pAnT * pnAT);
return result;
}
/**
* Retrieve the Odds Ratio Measure of a constraint for a given trace.
* <p>
* The Odds Ratio measure is defined as:
* OddsRatio(A->T) = ( P(A' intersection T') P(¬A' intersection ¬T') ) / ( P(A' intersection ¬T') P(¬A' intersection T') )
*
* @return
*/
public static float getTraceOddsRatio(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = (pAT * pnAnT) / (pAnT * pnAT);
return result;
}
/**
* Retrieve the Gini Index Measure of a constraint for a given trace.
* <p>
* The Gini Index measure is defined as:
* GiniIndex(A->T) = P(A) ∗ {P(B|A)^2 + P(¬B|A)^2} + P(¬A) ∗ {P(B|¬A)^2 * +P(¬B|¬A)^2} − P(B)^2 − P(¬B)^2
*
* @return
*/
public static float getTraceGiniIndex(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = pA * (Math.pow((pAT / pA), 2) + Math.pow(pAnT / pA, 2)) + pnA * (Math.pow(pnAT / pnA, 2) + Math.pow(pnAnT / pnA, 2)) - Math.pow(pT, 2) - Math.pow(pnT, 2);
return (float) result;
}
/**
* Retrieve the Gini Index Measure of a constraint for a given trace.
* <p>
* The Gini Index measure is defined as:
* GiniIndex(A->T) = P(A) ∗ {P(B|A)^2 + P(¬B|A)^2} + P(¬A) ∗ {P(B|¬A)^2 * +P(¬B|¬A)^2} − P(B)^2 − P(¬B)^2
*
* @return
*/
public static float getTraceGiniIndex(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
double result = pA * (Math.pow((pAT / pA), 2) + Math.pow(pAnT / pA, 2)) + pnA * (Math.pow(pnAT / pnA, 2) + Math.pow(pnAnT / pnA, 2)) - Math.pow(pT, 2) - Math.pow(pnT, 2);
return (float) result;
}
/**
* Retrieve the Certainty Factor Measure of a constraint for a given trace.
* <p>
* The Certainty Factor measure is defined as:
* CertaintyFactor(A->T) = (P(B|A) − P(B))/(1 − P(B))
*
* @return
*/
public static float getTraceCertaintyFactor(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = ((pAT / pA) - pT) / (1 - pT);
return result;
}
/**
* Retrieve the Certainty Factor Measure of a constraint for a given trace.
* <p>
* The Certainty Factor measure is defined as:
* CertaintyFactor(A->T) = (P(B|A) − P(B))/(1 − P(B))
*
* @return
*/
public static float getTraceCertaintyFactor(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = ((pAT / pA) - pT) / (1 - pT);
return result;
}
/**
* Retrieve the Coverage Measure of a constraint for a given trace.
* <p>
* The coverage measure is defined as:
* Coverage(A->T) = P(A)
*
* @return
*/
public static float getTraceCoverage(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = pA;
return result;
}
/**
* Retrieve the Coverage Measure of a constraint for a given trace.
* <p>
* The coverage measure is defined as:
* Coverage(A->T) = P(A)
*
* @return
*/
public static float getTraceCoverage(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pA;
return result;
}
/**
* Retrieve the Prevalence Measure of a constraint for a given trace.
* <p>
* The prevalence measure is defined as:
* Prevalence(A->T) = P(T)
*
* @return
*/
public static float getTracePrevalence(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = pT;
return result;
}
/**
* Retrieve the Prevalence Measure of a constraint for a given trace.
* <p>
* The prevalence measure is defined as:
* Prevalence(A->T) = P(T)
*
* @return
*/
public static float getTracePrevalence(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pT;
return result;
}
/**
* Retrieve the Added Value Measure of a constraint for a given trace.
* <p>
* The Added Value measure is defined as:
* AddedValue(A->T) = P(T|A)-P(T) = P(AT)/P(A) - P(T)
*
* @return
*/
public static float getTraceAddedValue(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = pAT / pA - pT;
return result;
}
/**
* Retrieve the Added Value Measure of a constraint for a given trace.
* <p>
* The Added Value measure is defined as:
* AddedValue(A->T) = P(T|A)-P(T) = P(AT)/P(A) - P(T)
*
* @return
*/
public static float getTraceAddedValue(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = (pAT / pA) - pT;
return result;
}
/**
* Retrieve the Relative Risk Measure of a constraint for a given trace.
* <p>
* The Relative Risk measure is defined as:
* RelativeRisk(A->T) = P(T|A)/P(T|¬A) = ( P(AT)/P(A) ) / ( P(¬AT)/P(¬A) )
*
* @return
*/
public static float getTraceRelativeRisk(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT / pA) / (pnAT / pnA);
return result;
}
/**
* Retrieve the Relative Risk Measure of a constraint for a given trace.
* <p>
* The Relative Risk measure is defined as:
* RelativeRisk(A->T) = P(T|A)/P(T|¬A) = ( P(AT)/P(A) ) / ( P(¬AT)/P(¬A) )
*
* @return
*/
public static float getTraceRelativeRisk(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = (pAT / pA) / (pnAT / pnA);
return result;
}
/**
* Retrieve the Jaccard Measure of a constraint for a given trace.
* <p>
* The Jaccard measure is defined as:
* Jaccard(A->T) = P(AT)/ ( P(A)+P(T) - P(AT) )
*
* @return
*/
public static float getTraceJaccard(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = pAT / (pA + pT - pAT);
return result;
}
/**
* Retrieve the Jaccard Measure of a constraint for a given trace.
* <p>
* The Jaccard measure is defined as:
* Jaccard(A->T) = P(AT)/ ( P(A)+P(T) - P(AT) )
*
* @return
*/
public static float getTraceJaccard(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = pAT / (pA + pT - pAT);
return result;
}
/**
* Retrieve the Ylue Q Measure of a constraint for a given trace.
* <p>
* The Ylue Q measure is defined as:
* YlueQ(A->T) = ( P(AT) P(¬A¬T) - P(A¬T)P(¬AT) ) / ( P(AT) P(¬A¬T) + P(A¬T)P(¬AT) )
*
* @return
*/
public static float getTraceYlueQ(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT * pnAnT - pAnT * pnAT) / (pAT * pnAnT + pAnT * pnAT);
return result;
}
/**
* Retrieve the Ylue Q Measure of a constraint for a given trace.
* <p>
* The Ylue Q measure is defined as:
* YlueQ(A->T) = ( P(AT) P(¬A¬T) - P(A¬T)P(¬AT) ) / ( P(AT) P(¬A¬T) + P(A¬T)P(¬AT) )
*
* @return
*/
public static float getTraceYlueQ(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = ((pAT * pnAnT) - (pAnT * pnAT)) / ((pAT * pnAnT) + (pAnT * pnAT));
return result;
}
/**
* Retrieve the Ylue Y Measure of a constraint for a given trace.
* <p>
* The Ylue Y measure is defined as:
* YlueY(A->T) = ( (P(AT) P(¬A¬T))^1/2 - (P(A¬T)P(¬AT))^1/2 ) / ( (P(AT) P(¬A¬T))^1/2 + (P(A¬T)P(¬AT))^1/2 )
*
* @return
*/
public static float getTraceYlueY(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = (Math.sqrt(pAT * pnAnT) - Math.sqrt(pAnT * pnAT)) / (Math.sqrt(pAT * pnAnT) + Math.sqrt(pAnT * pnAT));
return (float) result;
}
/**
* Retrieve the Ylue Y Measure of a constraint for a given trace.
* <p>
* The Ylue Y measure is defined as:
* YlueY(A->T) = ( (P(AT) P(¬A¬T))^1/2 - (P(A¬T)P(¬AT))^1/2 ) / ( (P(AT) P(¬A¬T))^1/2 + (P(A¬T)P(¬AT))^1/2 )
*
* @return
*/
public static float getTraceYlueY(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
double result = (Math.sqrt(pAT * pnAnT) - Math.sqrt(pAnT * pnAT)) / (Math.sqrt(pAT * pnAnT) + Math.sqrt(pAnT * pnAT));
return (float) result;
}
/**
* Retrieve the Klosgen Measure of a constraint for a given trace.
* <p>
* The Klosgen measure is defined as:
* Klosgen(A->T) = P(AT)^1/2 * Max( P(T|A) - P(T) , P(A|T) -P(A) ) = P(AT)^1/2 * Max( P(AT)/P(A) - P(T) , P(AT)/P(T) -P(A) )
*
* @return
*/
public static float getTraceKlosgen(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = Math.sqrt(pAT) * Math.max(pAT / pA - pT, pAT / pT - pA);
return (float) result;
}
/**
* Retrieve the Klosgen Measure of a constraint for a given trace.
* <p>
* The Klosgen measure is defined as:
* Klosgen(A->T) = P(AT)^1/2 * Max( P(T|A) - P(T) , P(A|T) -P(A) ) = P(AT)^1/2 * Max( P(AT)/P(A) - P(T) , P(AT)/P(T) -P(A) )
*
* @return
*/
public static float getTraceKlosgen(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
double result = Math.sqrt(pAT) * Math.max(((pAT / pA) - pT), ((pAT / pT) - pA));
return (float) result;
}
/**
* Retrieve the Conviction Measure of a constraint for a given trace.
* <p>
* The Conviction measure is defined as:
* Conviction(A->T) = ( P(A) P(¬T)) / P(A¬T)
*
* @return
*/
public static float getTraceConviction(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pA * pnT) / pAnT;
return result;
}
/**
* Retrieve the Conviction Measure of a constraint for a given trace.
* <p>
* The Conviction measure is defined as:
* Conviction(A->T) = ( P(A) P(¬T)) / P(A¬T)
*
* @return
*/
public static float getTraceConviction(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = (pA * pnT) / pAnT;
return result;
}
/**
* Retrieve the Interestingness Weighting Dependency Measure of a constraint for a given trace.
* <p>
* The Interestingness Weighting Dependency measure is defined as:
* InterestingnessWeightingDependency(A->T) = ( (P(AT)/( P(A)P(T) ))^k -1) * (P(AT))^m
* we assume m=2 and k=2 like in (Le and Lo 2015)
*
* @return
*/
public static float getTraceInterestingnessWeightingDependency(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
int m = 2;
int k = 2;
double result = (Math.pow(pAT / (pA * pT), k) - 1) * Math.pow(pAT, m);
return (float) result;
}
/**
* Retrieve the Interestingness Weighting Dependency Measure of a constraint for a given trace.
* <p>
* The Interestingness Weighting Dependency measure is defined as:
* InterestingnessWeightingDependency(A->T) = ( (P(AT)/( P(A)P(T) ))^k -1) * (P(AT))^m
* we assume m=2 and k=2 like in (Le and Lo 2015)
*
* @return
*/
public static float getTraceInterestingnessWeightingDependency(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
int m = 2;
int k = 2;
double result = (Math.pow(pAT / (pA * pT), k) - 1) * Math.pow(pAT, m);
return (float) result;
}
/**
* Retrieve the Collective Strength Measure of a constraint for a given trace.
* <p>
* The Collective Strength measure is defined as:
* CollectiveStrength(A->T) = ( P(AT)+P(¬T|¬A) )/( P(A)P(T)+P(¬A)P(¬B) ) * ( 1-P(A)P(T)-P(¬A)P(¬T) )/( 1-P(AT)-P(¬T|¬A) ) =
* = ( P(AT)+P(¬T¬A)/P(¬A) )/( P(A)P(T)+P(¬A)P(¬B) ) * ( 1-P(A)P(T)-P(¬A)P(¬T) )/( 1-P(AT)-P(¬T¬A)/P(¬A) )
*
* @return
*/
public static float getTraceCollectiveStrength(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT + (pnAnT / pnA)) / (pA * pT + pnA * pnT) * (1 - pA * pT - pnA * pnT) / (1 - pAT - (pnAnT / pnA));
return result;
}
/**
* Retrieve the Collective Strength Measure of a constraint for a given trace.
* <p>
* The Collective Strength measure is defined as:
* CollectiveStrength(A->T) = ( P(AT)+P(¬T|¬A) )/( P(A)P(T)+P(¬A)P(¬B) ) * ( 1-P(A)P(T)-P(¬A)P(¬T) )/( 1-P(AT)-P(¬T|¬A) ) =
* = ( P(AT)+P(¬T¬A)/P(¬A) )/( P(A)P(T)+P(¬A)P(¬B) ) * ( 1-P(A)P(T)-P(¬A)P(¬T) )/( 1-P(AT)-P(¬T¬A)/P(¬A) )
*
* @return
*/
public static float getTraceCollectiveStrength(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float result = ((pAT + (pnAnT / pnA)) / ((pA * pT) + (pnA * pnT))) * ((1 - (pA * pT) - (pnA * pnT)) / (1 - pAT - (pnAnT / pnA)));
return result;
}
/**
* Retrieve the Laplace Correction Measure of a constraint for a given trace.
* <p>
* The Laplace Correction measure is defined as:
* LaplaceCorrection(A->T) = ( N(AT) +1 ) / (N(A) + 2) = (n*P(AT) +1)/(n*P(A)+2)
* where N(x) is not the probability but the number of occurrence of x in the trace, thus n=trace length
* e.g. P(AB) = N(AB)/Length(Trace)
*
* @return
*/
public static float getTraceLaplaceCorrection(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
int n = reactiveConstraintEvaluation.length;
float result = (n * pAT + 1) / (n * pA + 2);
return result;
}
/**
* Retrieve the Laplace Correction Measure of a constraint for a given trace.
* <p>
* The Laplace Correction measure is defined as:
* LaplaceCorrection(A->T) = ( N(AT) +1 ) / (N(A) + 2) = (n*P(AT) +1)/(n*P(A)+2)
* where N(x) is not the probability but the number of occurrence of x in the trace, thus n=trace length
* e.g. P(AB) = N(AB)/Length(Trace)
*
* @return
*/
public static float getTraceLaplaceCorrection(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
// int n = reactiveConstraintEvaluation.length;
// TODO check ranges of this function: supposedly [0.5-1], but there are lower results
float result = (n * pAT + 1) / (n * pA + 2);
return result;
}
/**
* Retrieve the J-Measure Measure of a constraint for a given trace.
* <p>
* The J-Measure measure is defined as:
* JMeasure(A->T) = P(AT) log(P(T|A)/P(T)) + P(A¬T) log( P(¬T|A)/P(¬T) )
*
* @return
*/
public static float getTraceJMeasure(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = pAT * Math.log((pAT / pA) / pT) + pAnT * Math.log((pAnT / pA) / pnT);
return (float) result;
}
/**
* Retrieve the J-Measure Measure of a constraint for a given trace.
* <p>
* The J-Measure measure is defined as:
* JMeasure(A->T) = P(AT) log(P(T|A)/P(T)) + P(A¬T) log( P(¬T|A)/P(¬T) )
*
* @return
*/
public static float getTraceJMeasure(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = (pAT * Math.log((pAT / pA) / pT)) + (pAnT * Math.log((pAnT / pA) / pnT));
return (float) result;
}
/**
* Retrieve the One-way Support Measure of a constraint for a given trace.
* <p>
* The One-way Support measure is defined as:
* OnewaySupport(A->T) = P(T|A) log_2(P(AT)/(P(A)P(T)))
*
* @return
*/
public static float getTraceOneWaySupport(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = pAT / pA * log2(pAT / (pA * pT));
return (float) result;
}
/**
* Retrieve the One-way Support Measure of a constraint for a given trace.
* <p>
* The One-way Support measure is defined as:
* OnewaySupport(A->T) = P(T|A) log_2(P(AT)/(P(A)P(T)))
*
* @return
*/
public static float getTraceOneWaySupport(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = (pAT / pA) * log2(pAT / (pA * pT));
return (float) result;
}
/**
* Retrieve the Two-way Support Measure of a constraint for a given trace.
* <p>
* The Two-way Support measure is defined as:
* TwoWaySupport(A->T) = P(AT) log_2(P(AT)/(P(A)P(T)))
*
* @return
*/
public static float getTraceTwoWaySupport(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = pAT * log2(pAT / (pA * pT));
return (float) result;
}
/**
* Retrieve the Two-way Support Measure of a constraint for a given trace.
* <p>
* The Two-way Support measure is defined as:
* TwoWaySupport(A->T) = P(AT) log_2(P(AT)/(P(A)P(T)))
*
* @return
*/
public static float getTraceTwoWaySupport(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = pAT * log2(pAT / (pA * pT));
return (float) result;
}
/**
* Retrieve the Two-way Support Variation Measure of a constraint for a given trace.
* <p>
* The Two-way Support Variation measure is defined as:
* TwoWaySupportVariation(A->T) = P(AT) log_2( P(AT)/(P(A)P(T)) ) + P(A¬T) log_2( P(A¬T)/(P(A)P(¬T)) )
* + P(¬AT) log_2( P(¬AT)/(P(¬A)P(T)) ) + P(¬A¬T) log_2( P(¬A¬T)/(P(¬A)P(¬T)) )
*
* @return
*/
public static float getTraceTwoWaySupportVariation(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = pAT * log2(pAT / (pA * pT)) +
pAnT * log2(pAnT / (pA * pnT)) +
pnAT * log2(pnAT / (pnA * pT)) +
pnAnT * log2(pnAnT / (pnA * pnT));
return (float) result;
}
/**
* Retrieve the Two-way Support Variation Measure of a constraint for a given trace.
* <p>
* The Two-way Support Variation measure is defined as:
* TwoWaySupportVariation(A->T) = P(AT) log_2( P(AT)/(P(A)P(T)) ) + P(A¬T) log_2( P(A¬T)/(P(A)P(¬T)) )
* + P(¬AT) log_2( P(¬AT)/(P(¬A)P(T)) ) + P(¬A¬T) log_2( P(¬A¬T)/(P(¬A)P(¬T)) )
*
* @return
*/
public static float getTraceTwoWaySupportVariation(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = pAT * log2(pAT / (pA * pT)) +
pAnT * log2(pAnT / (pA * pnT)) +
pnAT * log2(pnAT / (pnA * pT)) +
pnAnT * log2(pnAnT / (pnA * pnT));
return (float) result;
}
/**
* Retrieve the Linear Correlation Coefficient Measure of a constraint for a given trace.
* <p>
* The Linear Correlation Coefficient measure is defined as:
* LinearCorrelationCoefficient(A->T) = (P(AT)-P(A)P(B)) / ((P(A)P(T)P(¬A)P(¬T))^1/2)
*
* @return
*/
public static float getTraceLinearCorrelationCoefficient(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = (pAT - pA * pT) / Math.sqrt(pA * pT * pnA * pnT);
return (float) result;
}
/**
* Retrieve the Linear Correlation Coefficient Measure of a constraint for a given trace.
* <p>
* The Linear Correlation Coefficient measure is defined as:
* LinearCorrelationCoefficient(A->T) = (P(AT)-P(A)P(B)) / ((P(A)P(T)P(¬A)P(¬T))^1/2)
*
* @return
*/
public static float getTraceLinearCorrelationCoefficient(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = (pAT - pA * pT) / Math.sqrt(pA * pT * pnA * pnT);
return (float) result;
}
/**
* Retrieve the Piatetsky-Shapiro Measure of a constraint for a given trace.
* <p>
* The Piatetsky-Shapiro measure is defined as:
* PiatetskyShapiro(A->T) = P(AT)-P(A)P(T)
*
* @return
*/
public static float getTracePiatetskyShapiro(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = pAT - pA * pT;
return result;
}
/**
* Retrieve the Piatetsky-Shapiro Measure of a constraint for a given trace.
* <p>
* The Piatetsky-Shapiro measure is defined as:
* PiatetskyShapiro(A->T) = P(AT)-P(A)P(T)
*
* @return
*/
public static float getTracePiatetskyShapiro(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
float result = pAT - (pA * pT);
return result;
}
/**
* Retrieve the Cosine Measure of a constraint for a given trace.
* <p>
* The Cosine measure is defined as:
* Cosine(A->T) = P(AT)/(P(A)P(T))^1/2
*
* @return
*/
public static float getTraceCosine(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = pAT / Math.sqrt(pA * pT);
return (float) result;
}
/**
* Retrieve the Cosine Measure of a constraint for a given trace.
* <p>
* The Cosine measure is defined as:
* Cosine(A->T) = P(AT)/(P(A)P(T))^1/2
*
* @return
*/
public static float getTraceCosine(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = pAT / Math.sqrt(pA * pT);
return (float) result;
}
/**
* Retrieve the Information Gain Measure of a constraint for a given trace.
* <p>
* The Information Gain measure is defined as:
* InformationGain(A->T) = log( P(AT)/(P(A)P(T)) )
*
* @return
*/
public static float getTraceInformationGain(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
double result = Math.log(pAT / (pA * pT));
return (float) result;
}
/**
* Retrieve the Information Gain Measure of a constraint for a given trace.
* <p>
* The Information Gain measure is defined as:
* InformationGain(A->T) = log( P(AT)/(P(A)P(T)) )
*
* @return
*/
public static float getTraceInformationGain(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
double result = Math.log(pAT / (pA * pT));
return (float) result;
}
/**
* Retrieve the Sebag-Schoenauer Measure of a constraint for a given trace.
* <p>
* The Sebag-Schoenauer measure is defined as:
* SebagSchoenauer(A->T) = P(AT)/P(A¬T)
*
* @return
*/
public static float getTraceSebagSchoenauer(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = pAT / pAnT;
return result;
}
/**
* Retrieve the Sebag-Schoenauer Measure of a constraint for a given trace.
* <p>
* The Sebag-Schoenauer measure is defined as:
* SebagSchoenauer(A->T) = P(AT)/P(A¬T)
*
* @return
*/
public static float getTraceSebagSchoenauer(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
float result = pAT / pAnT;
return result;
}
/**
* Retrieve the Least Contradiction Measure of a constraint for a given trace.
* <p>
* The Least Contradiction measure is defined as:
* LeastContradiction(A->T) = (P(AT)-P(A¬T)/P(T)
*
* @return
*/
public static float getTraceLeastContradiction(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT - pAnT) / pT;
return result;
}
/**
* Retrieve the Least Contradiction Measure of a constraint for a given trace.
* <p>
* The Least Contradiction measure is defined as:
* LeastContradiction(A->T) = (P(AT)-P(A¬T)/P(T)
*
* @return
*/
public static float getTraceLeastContradiction(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
float result = (pAT - pAnT) / pT;
return result;
}
/**
* Retrieve the Odd Multiplier Measure of a constraint for a given trace.
* <p>
* The Odd Multiplier measure is defined as:
* OddMultiplier(A->T) = ( P(AT)P(¬T) )/( P(T)P(A¬T) )
*
* @return
*/
public static float getTraceOddMultiplier(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT * pnT) / (pT * pAnT);
return result;
}
/**
* Retrieve the Odd Multiplier Measure of a constraint for a given trace.
* <p>
* The Odd Multiplier measure is defined as:
* OddMultiplier(A->T) = ( P(AT)P(¬T) )/( P(T)P(A¬T) )
*
* @return
*/
public static float getTraceOddMultiplier(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
float result = (pAT * pnT) / (pT * pAnT);
return result;
}
/**
* Retrieve the Example and Counterexample Rate Measure of a constraint for a given trace.
* <p>
* The Example and Counterexample Rate measure is defined as:
* ExampleCounterexampleRate(A->T) = 1- P(A¬T)/P(AT)
*
* @return
*/
public static float getTraceExampleCounterexampleRate(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = 1 - pAnT / pAT;
return result;
}
/**
* Retrieve the Example and Counterexample Rate Measure of a constraint for a given trace.
* <p>
* The Example and Counterexample Rate measure is defined as:
* ExampleCounterexampleRate(A->T) = 1- P(A¬T)/P(AT)
*
* @return
*/
public static float getTraceExampleCounterexampleRate(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
float result = 1 - (pAnT / pAT);
return result;
}
/**
* Retrieve the Zhang Measure of a constraint for a given trace.
* <p>
* The Zhang measure is defined as:
* Zhang(A->T) = ( P(AT)-P(A)P(T) ) / Max( P(AT)P(¬T), P(T)P(A¬T))
*
* @return
*/
public static float getTraceZhang(byte[] reactiveConstraintEvaluation) {
float[] p = getReactiveProbabilities(reactiveConstraintEvaluation);// result { 0: activation, 1: target, 2: no activation, 3: no target}
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float[] pIntersection = getReactiveIntersectionsProbabilities(reactiveConstraintEvaluation);// result {0: 00, 1: 01, , 2: 10, 3:11}
float pnAnT = pIntersection[0];
float pnAT = pIntersection[1];
float pAnT = pIntersection[2];
float pAT = pIntersection[3];
float result = (pAT - pA * pT) / Math.max(pAT * pnT, pT * pAnT);
return result;
}
/**
* Retrieve the Zhang Measure of a constraint for a given trace.
* <p>
* The Zhang measure is defined as:
* Zhang(A->T) = ( P(AT)-P(A)P(T) ) / Max( P(AT)P(¬T), P(T)P(A¬T))
*
* @return
*/
public static float getTraceZhang(float[] p) {
float pA = p[0];
float pnA = p[2];
float pT = p[1];
float pnT = p[3];
float pnAnT = p[4];
float pnAT = p[5];
float pAnT = p[6];
float pAT = p[7];
float n = p[8];
float result = (pAT - (pA * pT)) / Math.max((pAT * pnT), (pT * pAnT));
return result;
}
/**
* return the support measure for a given constraint over the entire log
*
* @return
*/
// @Deprecated
public static float getLogSupport(int constraintIndex, MegaMatrixMonster matrix) {
return getMeasureAverage(constraintIndex, 0, matrix.getTraceMeasuresMatrix());
// return getLogDuckTapeMeasures(constraintIndex, 0, matrix.getMatrix());
}
/**
* return the given measure of a constraint over the entire log using the "tape" method:
* Consider the log as a single trace and compute the measure with the trace methods
* <p>
* BEWARE! It is the bottleneck of the aggregated measure output function. Temporary disabled
*
* @param constraintIndex
* @param measureIndex
* @param bytesMatrix
* @return
*/
public static double getLogDuckTapeMeasures(int constraintIndex, int measureIndex, byte[][][] bytesMatrix, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
double result = 0;
byte[] tapeLog = {};
for (byte[][] trace : bytesMatrix) {
tapeLog = ArrayUtils.addAll(tapeLog, trace[constraintIndex]);
}
return getTraceMeasure(tapeLog, measureIndex, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
    /**
     * Compute the probability for a SINGLE constraint over the entire log seen as a single (duck)tape
     *
     * @return always 0: this is an unimplemented placeholder (see TODO below)
     */
    public static double getLogDuckTapeProbability() {
        // TODO LogDuckTape
        return 0;
    }
/**
* return the X measure of a constraint over the entire log as the average of the support within all the traces
*
* @return
*/
// @Deprecated
public static float getMeasureAverage(int constraintIndex, int measureIndex, float[][][] traceMeasuresMatrix) {
float result = 0;
for (float[][] traceEval : traceMeasuresMatrix) {
result += traceEval[constraintIndex][measureIndex];
}
return result / traceMeasuresMatrix.length;
}
/**
* Retrieve the measure distribution info.
* it takes the results of all the traces and draw the distribution properties.
* i.e. average value, standard deviation, quartile, max, min
*
* @param traceMeasures array containing the measure value for each trace
* @param nanLogSkipFlag skip NaN values during the computation
* @return array with the distribution values
*/
public static double[] getMeasureDistribution(double[] traceMeasures, boolean nanLogSkipFlag) {
SummaryStatistics measureDistribution = new SummaryStatistics();
for (double measure : traceMeasures) {
if (nanLogSkipFlag && Double.isNaN(measure)) continue;
measureDistribution.addValue(measure);
}
double[] result = {
measureDistribution.getMean(),
measureDistribution.getGeometricMean(),
measureDistribution.getVariance(),
measureDistribution.getPopulationVariance(),
measureDistribution.getStandardDeviation(),
measureDistribution.getMax(),
measureDistribution.getMin()
};
return result;
}
/**
* Returns an object containing the statistic of the measure distribution for a given measure and constraint over the MegaMatrixMonster
*
* @param constraintIndex
* @param measureIndex
* @param traceMeasuresMatrix
* @param nanLogSkipFlag
* @return
*/
public static SummaryStatistics getMeasureDistributionObject(int constraintIndex, int measureIndex, float[][][] traceMeasuresMatrix, boolean nanLogSkipFlag) {
SummaryStatistics measureDistribution = new SummaryStatistics();
for (float[][] traceEval : traceMeasuresMatrix) {
if (nanLogSkipFlag && Float.isNaN(traceEval[constraintIndex][measureIndex]))
continue;
/*
infinity vales make SummaryStatistics returns NaN for the mean and other stats.
Either skip them or change the mean function to consider them
*/
if (Float.isInfinite(traceEval[constraintIndex][measureIndex])) {
if (traceEval[constraintIndex][measureIndex] > 0)
measureDistribution.addValue(Float.MAX_VALUE);
else
measureDistribution.addValue(Float.MIN_VALUE);
} else
measureDistribution.addValue(traceEval[constraintIndex][measureIndex]);
}
return measureDistribution;
}
/**
* Returns an object containing the statistic of the measure distribution for a given constraint given the matrix result of only one measure over the log
*
* @param constraintIndex
* @param traceMeasureMatrix
* @param nanLogSkipFlag
* @return
*/
public static SummaryStatistics getMeasureDistributionObject(int constraintIndex, float[][] traceMeasureMatrix, boolean nanLogSkipFlag) {
SummaryStatistics measureDistribution = new SummaryStatistics();
for (float[] traceEval : traceMeasureMatrix) {
if (nanLogSkipFlag && Float.isNaN(traceEval[constraintIndex]))
continue;
measureDistribution.addValue(traceEval[constraintIndex]);
}
return measureDistribution;
}
/**
* given an evaluation array of a reactive constraint, extract the result of only the activator as an array of 0s and 1s
*
* @param reactiveConstraintEvaluation
* @return
*/
private static byte[] getActivatorEvaluation(byte[] reactiveConstraintEvaluation) {
byte[] result = new byte[reactiveConstraintEvaluation.length];
for (int i = 0; i < result.length; i++) {
result[i] = (byte) (reactiveConstraintEvaluation[i] / 2); // the activator is true if the byte is >1, i.e. 2 or 3
}
return result;
}
/**
* given an evaluation array of a reactive constraint, extract the result of only the target as an array of 0s and 1s
*
* @param reactiveConstraintEvaluation
* @return
*/
private static byte[] getTargetEvaluation(byte[] reactiveConstraintEvaluation) {
byte[] result = new byte[reactiveConstraintEvaluation.length];
for (int i = 0; i < result.length; i++) {
result[i] = (byte) (reactiveConstraintEvaluation[i] % 2); // the target is true if the byte is obb, i,e, 1 or 3
}
return result;
}
/**
* Return the inverse result evaluation, i.e., swap of 1 to 0 and vice-versa
*
* @param evaluation
* @return
*/
private static byte[] getNegativeEvaluation(byte[] evaluation) {
byte[] result = evaluation.clone();
for (int i = 0; i < result.length; i++) {
if (result[i] == 1) {
result[i] = 0;
} else {
result[i] = 1;
}
}
return result;
}
/**
* Return the inverse result evaluation, i.e., swap of 1 to 0 and vice-versa
*
* @param reactiveConstraintEvaluation
* @return
*/
private static byte[] getNegativeReactiveConstraintEvaluation(byte[] reactiveConstraintEvaluation) {
byte[] result = reactiveConstraintEvaluation.clone();
for (int i = 0; i < result.length; i++) {
result[i] = (byte) (3 - result[i]);
}
return result;
}
/**
* Return the logarithm in base 2 of a given number
*
* @param number
* @return
*/
private static double log2(float number) {
// return (Math.log(number) / Math.log(2) + 1e-10);
return Math.log(number) / Math.log(2);
}
}
| 93,173 | 33.844428 | 195 | java |
Janus | Janus-master/src/minerful/reactive/measurements/MegaMatrixMonster.java | package minerful.reactive.measurements;
import minerful.logparser.LogParser;
import minerful.reactive.automaton.SeparatedAutomatonOfflineRunner;
import minerful.reactive.miner.ReactiveMinerOfflineQueryingCore;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.apache.log4j.Logger;
import java.io.*;
import java.util.ArrayList;
import java.util.Collection;
/**
* Data structure for the fine grain evaluation result of constraints in each event of a log traces
* <p>
* About variable matrix (byte[][][]) bytes meaning:
* Each byte stores the results of both Activator and target of a given constraint in a specific trace.
* The left bit is for the activator, the right bit for the target,i.e.,[activator-bit][target-bit]
* In details:
* 0 -> 00 -> Activator: False, Target: False
* 1 -> 01 -> Activator: False, Target: true
* 2 -> 10 -> Activator: True, Target: False
* 3 -> 11 -> Activator: True, Target: True
*
* <p>
* About variable matrixLite (int[][][]) meaning:
* compact version of the byte[][][] where instead of saving the result for each event, we keep only what is required for the traces measures computation.
* Each int stores the counter of the results of a combination of Activator and target of a given constraint in a specific trace.
* In details:
* COUNTER INDEX -> Explanation
* 0 -> Number of Activator: True [#]
* 1 -> Number of Target: True [#]
* 2 -> Number of Activator: False
* 3 -> Number of Target: False
* 4 -> Number of Activator: False, Target: False
* 5 -> Number of Activator: False, Target: true
* 6 -> Number of Activator: True, Target: False
* 7 -> Number of Activator: True, Target: True [#]
* 8 -> Trace lenght [#]
* <p>
* Note. Supposedly only 4 value (marked with #) are enough to derive all the others, but lets try to keep all 9 for now
* <p>
* About model measures:
* the model measures are computed considering the model as a constraint itself.
* It is always the last constraint, thus all the automata.size()+1 along this class.
* <p>
* The rationale of the trace evaluation of the model is:
* take all the activated automata in one instant of the trace and check if their targets are satisfied.
* Practically speaking:
* if there is at least one 10, then the entire model evaluates to 10,
* else if there is at least one 11, then the entire model evaluates to 11,
* else if there is at least one 01, then the entire model evaluates to 01,
* otherwise the entire model evaluates to 00.
*/
public class MegaMatrixMonster {
protected static Logger logger;
private final LogParser log;
private final Collection<SeparatedAutomatonOfflineRunner> automata;
private byte[][][] eventsEvaluationMatrix; // [trace index][constraint index][event index]
private int[][][] eventsEvaluationMatrixLite; // [trace index][constraint index][counter index]
private float[][][] traceMeasuresMatrix; // [trace index][constraint index][measure index] -> support:0, confidence:1, lovinger: 2
private SummaryStatistics[][] traceMeasuresDescriptiveStatistics; // [constraint index][measure index]
private float[][] logMeasuresMatrix; // [constraint index][measure index]
private JanusPrintParameters janusViewParams;
    {
        // Lazy one-time binding of the shared (static) logger.
        // NOTE(review): the logger is registered under ReactiveMinerOfflineQueryingCore's
        // canonical name rather than this class's — looks like a copy-paste; confirm intent.
        if (logger == null) {
            logger = Logger.getLogger(ReactiveMinerOfflineQueryingCore.class.getCanonicalName());
        }
    }
    /**
     * Bare constructor: keeps the log and the automata; evaluation/measure matrices
     * are allocated later by the compute* methods.
     *
     * @param log      parsed log the measurements refer to
     * @param automata offline runners of the constraints to measure
     */
    public MegaMatrixMonster(LogParser log, Collection<SeparatedAutomatonOfflineRunner> automata) {
        this.log = log;
        this.automata = automata;
        this.janusViewParams = new JanusPrintParameters(); // default console/printing options
    }
    /**
     * Same as the base constructor, but with explicit console/printing options.
     *
     * @param janusViewParams printing options (e.g. suppression of progress output)
     */
    public MegaMatrixMonster(LogParser log, Collection<SeparatedAutomatonOfflineRunner> automata, JanusPrintParameters janusViewParams) {
        this(log, automata);
        this.janusViewParams = janusViewParams;
    }
    /**
     * Wraps a pre-computed fine-grained event-evaluation matrix.
     *
     * @param matrix [trace][constraint][event] evaluation bytes (encoding 0..3, see class doc)
     */
    public MegaMatrixMonster(byte[][][] matrix, LogParser log, Collection<SeparatedAutomatonOfflineRunner> automata) {
        this(log, automata);
        this.eventsEvaluationMatrix = matrix;
        System.gc(); // hint: caller may have just dropped large intermediate structures
    }
    /**
     * Fine-grained matrix variant with explicit console/printing options.
     */
    public MegaMatrixMonster(byte[][][] matrix, LogParser log, Collection<SeparatedAutomatonOfflineRunner> automata, JanusPrintParameters janusViewParams) {
        this(matrix, log, automata);
        this.janusViewParams = janusViewParams;
    }
    /**
     * Wraps a pre-computed compact ("lite") counter matrix.
     *
     * @param matrixLite [trace][constraint][counter] counters (see class doc for the 9 counter slots)
     */
    public MegaMatrixMonster(int[][][] matrixLite, LogParser log, Collection<SeparatedAutomatonOfflineRunner> automata) {
        this(log, automata);
        this.eventsEvaluationMatrixLite = matrixLite;
        System.gc(); // hint: caller may have just dropped large intermediate structures
    }
    /**
     * Compact ("lite") matrix variant with explicit console/printing options.
     */
    public MegaMatrixMonster(int[][][] matrixLite, LogParser log, Collection<SeparatedAutomatonOfflineRunner> automata, JanusPrintParameters janusViewParams) {
        this(matrixLite, log, automata);
        this.janusViewParams = janusViewParams;
    }
/**
* Return the space required to serialize the current results of the Mega Matrix Monster
*
* @return
* @throws IOException
*/
public double getSpaceConsumption(String filePath) throws IOException {
double result = 0.0;
// events
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = null;
FileOutputStream fos = new FileOutputStream(filePath, true);
// fos.write("traces;events-TOT;Constraints;Measures;EventsSpace;TracesSpace;LogSpace\n".getBytes());
if (eventsEvaluationMatrixLite != null)
fos.write(("" + eventsEvaluationMatrixLite.length + ";" + log.numberOfEvents() + ";" + eventsEvaluationMatrixLite[0].length + ";" + traceMeasuresMatrix[0][0].length + ";").getBytes());
else
fos.write(("" + eventsEvaluationMatrix.length + ";" + log.numberOfEvents() + ";" + eventsEvaluationMatrix[0].length + ";" + traceMeasuresMatrix[0][0].length + ";").getBytes());
try {
oos = new ObjectOutputStream(baos);
if (eventsEvaluationMatrixLite != null)
oos.writeObject(eventsEvaluationMatrixLite);
else
oos.writeObject(eventsEvaluationMatrix);
oos.flush();
oos.close();
logger.info("size of events measures data structure : " + baos.size() / 1024d / 1024d + " MB");
fos.write(("" + baos.size() / 1024d / 1024d + " MB;").getBytes());
result += baos.size();
} catch (IOException | OutOfMemoryError e) {
logger.error("size of events measures data structure TOO BIG for serialization");
fos.write(("outOfMem").getBytes());
e.printStackTrace();
}
// traces
try {
baos = new ByteArrayOutputStream();
oos = new ObjectOutputStream(baos);
oos.writeObject(traceMeasuresMatrix);
oos.flush();
oos.close();
logger.info("size of traces measures data structure : " + baos.size() / 1024d / 1024d + " MB");
fos.write(("" + baos.size() / 1024d / 1024d + " MB;").getBytes());
result += baos.size();
} catch (IOException | OutOfMemoryError e) {
logger.error("size of traces measures data structure TOO BIG for serialization");
fos.write(("outOfMem").getBytes());
e.printStackTrace();
}
// TRACE STATSS
try {
baos = new ByteArrayOutputStream();
oos = new ObjectOutputStream(baos);
oos.writeObject(traceMeasuresDescriptiveStatistics);
oos.flush();
oos.close();
logger.info("size of trace measures stats data structure : " + baos.size() / 1024d / 1024d + " MB");
fos.write(("" + baos.size() / 1024d / 1024d + " MB\n").getBytes());
result += baos.size();
} catch (IOException | OutOfMemoryError e) {
logger.error("size of trace measures stats data structure TOO BIG for serialization");
fos.write(("outOfMem\n").getBytes());
e.printStackTrace();
}
// NEU LOG
// log
try {
baos = new ByteArrayOutputStream();
oos = new ObjectOutputStream(baos);
oos.writeObject(logMeasuresMatrix);
oos.flush();
oos.close();
logger.info("size of log measures data structure : " + baos.size() / 1024d / 1024d + " MB");
fos.write(("" + baos.size() / 1024d / 1024d + " MB\n").getBytes());
result += baos.size();
} catch (IOException | OutOfMemoryError e) {
logger.error("size of log measures data structure TOO BIG for serialization");
fos.write(("outOfMem\n").getBytes());
e.printStackTrace();
}
logger.info("Size of MegaMatrixMonster results : " + result / 1024d / 1024d + " MB");
fos.close();
return result / 1024d / 1024d;
}
    /** @return log-level measures laid out as [constraint index][measure index]; null until computed */
    public float[][] getLogMeasuresMatrix() {
        return logMeasuresMatrix;
    }
    /** @return fine-grained event evaluations [trace][constraint][event]; null when the "lite" matrix is in use */
    public byte[][][] getEventsEvaluationMatrix() {
        return eventsEvaluationMatrix;
    }
    /** @return compact per-trace counters [trace][constraint][counter]; null when the full matrix is in use */
    public int[][][] getEventsEvaluationMatrixLite() {
        return eventsEvaluationMatrixLite;
    }
    /** @return the parsed log these measurements refer to */
    public LogParser getLog() {
        return log;
    }
    /** @return the offline constraint runners being measured */
    public Collection<SeparatedAutomatonOfflineRunner> getAutomata() {
        return automata;
    }
public Collection<String> getConstraintsNames() {
Collection<String> result = new ArrayList<>();
for (SeparatedAutomatonOfflineRunner c : automata) {
result.add(c.toString());
}
return result;
}
    /** @return per-trace measures laid out as [trace][constraint][measure]; null until computed */
    public float[][][] getTraceMeasuresMatrix() {
        return traceMeasuresMatrix;
    }
/**
* Return the number of constraints in the matrix
*
* @return
*/
public int getConstraintsNumber() {
if (eventsEvaluationMatrixLite == null) {
return eventsEvaluationMatrix[0].length;
} else {
return eventsEvaluationMatrixLite[0].length;
}
}
    /**
     * Get the value of a specific measure of a specific trace for a specific constraint.
     *
     * @param trace        trace index
     * @param constraint   constraint index
     * @param measureIndex measure index (see Measures.MEASURE_NAMES)
     * @return the stored measurement (float widened to double)
     */
    public double getSpecificMeasure(int trace, int constraint, int measureIndex) {
        return traceMeasuresMatrix[trace][constraint][measureIndex];
    }
/**
* retrieve the measurements for the current matrix/matrixLite
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
* @param nanLogSkipFlag
*/
public void computeAllMeasures(boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue, boolean nanLogSkipFlag) {
logger.info("Initializing measures matrix...");
// TRACE MEASURES
logger.info("Retrieving Trace Measures...");
if (eventsEvaluationMatrixLite == null) {
traceMeasuresMatrix = new float[eventsEvaluationMatrix.length][automata.size() + 1][Measures.MEASURE_NUM]; //the space problem is here, not in the byte matrix
computeTraceMeasuresMonster(nanTraceSubstituteFlag, nanTraceSubstituteValue);
} else {
traceMeasuresMatrix = new float[eventsEvaluationMatrixLite.length][automata.size() + 1][Measures.MEASURE_NUM]; //the space problem is here, not in the byte matrix
computeTraceMeasuresLite(nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
System.gc();
logger.info("Retrieving Trace measures log statistics...");
// trace measure LOG STATISTICS
int constraintsNum = automata.size() + 1;
for (int constraint = 0; constraint < (automata.size() + 1); constraint++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rConstraint: " + constraint + "/" + constraintsNum); // Status counter "current trace/total trace"
for (int measure = 0; measure < Measures.MEASURE_NUM; measure++) {
traceMeasuresDescriptiveStatistics[constraint][measure] = Measures.getMeasureDistributionObject(constraint, measure, traceMeasuresMatrix, nanLogSkipFlag);
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rConstraint: " + constraintsNum + "/" + constraintsNum); // Status counter "current trace/total trace"
System.out.println();
}
System.gc();
logger.info("Retrieving NEW Log Measures...");
// LOG MEASURES
logMeasuresMatrix = new float[automata.size() + 1][Measures.MEASURE_NUM];
computeAllLogMeasures();
}
/**
* retrieve the measurements for the current matrix/matrixLite
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
*/
public void computeAllTraceMeasures(boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
logger.info("Initializing measures matrix...");
// TRACE MEASURES
logger.info("Retrieving Trace Measures...");
if (eventsEvaluationMatrixLite == null) {
traceMeasuresMatrix = new float[eventsEvaluationMatrix.length][automata.size() + 1][Measures.MEASURE_NUM]; //the space problem is here, not in the byte matrix
computeTraceMeasuresMonster(nanTraceSubstituteFlag, nanTraceSubstituteValue);
} else {
traceMeasuresMatrix = new float[eventsEvaluationMatrixLite.length][automata.size() + 1][Measures.MEASURE_NUM]; //the space problem is here, not in the byte matrix
computeTraceMeasuresLite(nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
System.gc();
}
/**
* retrieve the measurements for the current matrix/matrixLite
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
*/
public void computeSingleTraceMeasures(String measureName, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
logger.info("Initializing measures matrix...");
int measureIndex = Measures.getMeasureIndex(measureName);
logger.info("Retrieving Trace Measures...");
if (eventsEvaluationMatrixLite == null) {
traceMeasuresMatrix = new float[eventsEvaluationMatrix.length][automata.size() + 1][1]; //the space problem is here, not in the byte matrix
// for the entire log
for (int trace = 0; trace < eventsEvaluationMatrix.length; trace++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + trace + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
// for each trace
for (int constraint = 0; constraint < eventsEvaluationMatrix[trace].length; constraint++) {
traceMeasuresMatrix[trace][constraint][0] = Measures.getTraceMeasure(eventsEvaluationMatrix[trace][constraint], measureIndex, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + eventsEvaluationMatrix.length + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
System.out.println();
}
} else {
traceMeasuresMatrix = new float[eventsEvaluationMatrixLite.length][automata.size() + 1][1]; //the space problem is here, not in the byte matrix
// for the entire log
for (int trace = 0; trace < eventsEvaluationMatrixLite.length; trace++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + trace + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
// for each trace
for (int constraint = 0; constraint < eventsEvaluationMatrixLite[trace].length; constraint++) {
// for each constraint
traceMeasuresMatrix[trace][constraint][0] = Measures.getTraceMeasure(eventsEvaluationMatrixLite[trace][constraint], measureIndex, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + eventsEvaluationMatrix.length + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
System.out.println();
}
}
System.gc();
}
/**
* retrieve the measurements for the current matrix/matrixLite
*
* @param nanLogSkipFlag
*/
public void computeAllTraceMeasuresStats(boolean nanLogSkipFlag) {
logger.info("Retrieving Trace measures log statistics...");
traceMeasuresDescriptiveStatistics = new SummaryStatistics[automata.size() + 1][Measures.MEASURE_NUM];
// trace measure LOG STATISTICS
int constraintsNum = automata.size() + 1;
for (int constraint = 0; constraint < (automata.size() + 1); constraint++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rConstraint: " + constraint + "/" + constraintsNum); // Status counter "current trace/total trace"
for (int measure = 0; measure < Measures.MEASURE_NUM; measure++) {
traceMeasuresDescriptiveStatistics[constraint][measure] = Measures.getMeasureDistributionObject(constraint, measure, traceMeasuresMatrix, nanLogSkipFlag);
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rConstraint: " + constraintsNum + "/" + constraintsNum); // Status counter "current trace/total trace"
System.out.println();
}
System.gc();
}
/**
* retrieve the measurements for the current matrix/matrixLite
*
* @param nanLogSkipFlag
*/
public void computeSingleTraceMeasuresStats(boolean nanLogSkipFlag) {
logger.info("Retrieving Trace measures log statistics...");
traceMeasuresDescriptiveStatistics = new SummaryStatistics[automata.size() + 1][1];
// trace measure LOG STATISTICS
int constraintsNum = automata.size() + 1;
for (int constraint = 0; constraint < (automata.size() + 1); constraint++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rConstraint: " + constraint + "/" + constraintsNum); // Status counter "current trace/total trace"
traceMeasuresDescriptiveStatistics[constraint][0] = Measures.getMeasureDistributionObject(constraint, 0, traceMeasuresMatrix, nanLogSkipFlag);
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rConstraint: " + constraintsNum + "/" + constraintsNum); // Status counter "current trace/total trace"
System.out.println();
}
System.gc();
}
/**
* Calculate a specific measure at the traces level for all the constraints, given its name.
* The measurements are returned in output and not stored into the object.
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
* @param measureName
*/
public float[][] retrieveSingleTraceMeasures(String measureName, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
return retrieveSingleTraceMeasures(Measures.getMeasureIndex(measureName), nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
/**
* Calculate a specific measure at the traces level for all the constraints, given its index.
* The measurements are returned in output and not stored into the object.
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
* @param measureIndex
*/
public float[][] retrieveSingleTraceMeasures(int measureIndex, boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
logger.info("Initializing traces measure matrix...");
float[][] measureResult; //the space problem is here, not in the byte matrix
logger.info("Retrieving Trace Measures...");
if (eventsEvaluationMatrixLite == null) {
measureResult = new float[eventsEvaluationMatrix.length][automata.size() + 1];
// for the entire log
for (int trace = 0; trace < eventsEvaluationMatrix.length; trace++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + trace + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
// for each trace
for (int constraint = 0; constraint < eventsEvaluationMatrix[trace].length; constraint++) {
measureResult[trace][constraint] = Measures.getTraceMeasure(eventsEvaluationMatrix[trace][constraint], measureIndex, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + eventsEvaluationMatrix.length + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
System.out.println();
}
} else {
measureResult = new float[eventsEvaluationMatrixLite.length][automata.size() + 1];
// for the entire log
for (int trace = 0; trace < eventsEvaluationMatrixLite.length; trace++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + trace + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
// for each trace
for (int constraint = 0; constraint < eventsEvaluationMatrixLite[trace].length; constraint++) {
// for each constraint
measureResult[trace][constraint] = Measures.getTraceMeasure(eventsEvaluationMatrixLite[trace][constraint], measureIndex, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + eventsEvaluationMatrix.length + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
System.out.println();
}
}
return measureResult;
}
/**
* Calculate a specific measure at the log level for all the constraints, given its specific trace measurements.
* The measurements are returned in output and not stored into the object.
*
* @param nanLogSkipFlag
*/
public SummaryStatistics[] computeSingleMeasureLog(float[][] traceMeasures, boolean nanLogSkipFlag) {
logger.info("Initializing log measure matrix...");
int constraintsNum = automata.size() + 1;
SummaryStatistics[] logMeasuresresult = new SummaryStatistics[constraintsNum];
logger.info("Retrieving Log Measures...");
for (int constraint = 0; constraint < constraintsNum; constraint++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rConstraint: " + constraint + "/" + constraintsNum); // Status counter "current trace/total trace"
logMeasuresresult[constraint] = Measures.getMeasureDistributionObject(constraint, traceMeasures, nanLogSkipFlag);
}
if (!janusViewParams.suppressMeasuresStatusPrint) System.out.println();
return logMeasuresresult;
}
/**
* retrieve the measurements for the current matrix
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
*/
private void computeTraceMeasuresMonster(boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
// for the entire log
for (int trace = 0; trace < eventsEvaluationMatrix.length; trace++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + trace + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
// for each trace
for (int constraint = 0; constraint < eventsEvaluationMatrix[trace].length; constraint++) {
// for each constraint
for (int measure = 0; measure < Measures.MEASURE_NUM; measure++) {
traceMeasuresMatrix[trace][constraint][measure] = Measures.getTraceMeasure(eventsEvaluationMatrix[trace][constraint], measure, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
}
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + eventsEvaluationMatrix.length + "/" + eventsEvaluationMatrix.length); // Status counter "current trace/total trace"
System.out.println();
}
}
    /**
     * Computes the log-level value of every measure for every constraint.
     * Per trace, the conditional frequencies of the target given (non-)activation are
     * taken, averaged over all traces, recombined into marginals, and fed to
     * Measures.getLogMeasure.
     */
    public void computeAllLogMeasures() {
        logger.info("Retrieving Log Measures...");
        logMeasuresMatrix = new float[automata.size() + 1][Measures.MEASURE_NUM];
        int constraintsNum = automata.size() + 1;
        int tracesNum = log.wholeLength();
        // for each constraint (the extra column is the whole model)
        for (int constraint = 0; constraint < constraintsNum; constraint++) {
            if (!janusViewParams.suppressMeasuresStatusPrint)
                System.out.print("\rConstraint: " + constraint + "/" + constraintsNum); // Status counter "current trace/total trace"
            // accumulators for the per-trace conditional probabilities
            float[] currentTraceProbabilities = new float[9];
            float ATgivenA = 0;          // P(A & T | A), summed then averaged
            float AnotTgivenA = 0;       // P(A & !T | A)
            float notATgivenNotA = 0;    // P(!A & T | !A)
            float notAnotTgivenNotA = 0; // P(!A & !T | !A)
            for (int trace = 0; trace < tracesNum; trace++) {
                // probabilities layout (see Measures.getTraceProbabilities):
                // { 0: activation, 1: target, 2: no activation, 3: no target,
                //   4: 00, 5: 01, 6: 10, 7: 11, 8: trace length }
                if (eventsEvaluationMatrixLite == null) {
                    currentTraceProbabilities = Measures.getTraceProbabilities(eventsEvaluationMatrix[trace][constraint]);
                } else {
                    currentTraceProbabilities = Measures.getTraceProbabilities(eventsEvaluationMatrixLite[trace][constraint]);
                }
                // A trace with no activation (p[0]==0 makes the ratio NaN) contributes to
                // the "given not activated" side; otherwise to the "given activated" side.
                // NOTE(review): if p[2] is also 0 (e.g. empty trace) the divisions below
                // yield NaN and poison the accumulators — confirm this is intended.
                if (Float.isNaN(currentTraceProbabilities[7] / currentTraceProbabilities[0])) {
                    notATgivenNotA += currentTraceProbabilities[5] / currentTraceProbabilities[2];
                    notAnotTgivenNotA += currentTraceProbabilities[4] / currentTraceProbabilities[2];
                } else {
                    ATgivenA += currentTraceProbabilities[7] / currentTraceProbabilities[0];
                    AnotTgivenA += currentTraceProbabilities[6] / currentTraceProbabilities[0];
                }
            }
            // average the accumulated conditionals over the number of traces
            ATgivenA /= tracesNum;
            AnotTgivenA /= tracesNum;
            notATgivenNotA /= tracesNum;
            notAnotTgivenNotA /= tracesNum;
            // marginals derived from the averaged conditionals
            float A = ATgivenA + AnotTgivenA;
            float notA = notATgivenNotA + notAnotTgivenNotA;
            float T = ATgivenA + notATgivenNotA;
            float notT = AnotTgivenA + notAnotTgivenNotA;
            float n = tracesNum;
            // layout expected by Measures.getLogMeasure:
            // { A, T, -A, -T, -A-T, -AT, A-T, AT, n }
            float[] currentLogProbabilities = {A, T, notA, notT, notAnotTgivenNotA, notATgivenNotA, AnotTgivenA, ATgivenA, n};
            for (int measure = 0; measure < Measures.MEASURE_NUM; measure++) {
                logMeasuresMatrix[constraint][measure] = Measures.getLogMeasure(currentLogProbabilities, measure);
            }
        }
        if (!janusViewParams.suppressMeasuresStatusPrint) {
            System.out.print("\rConstraint: " + constraintsNum + "/" + constraintsNum); // Status counter "current trace/total trace"
            System.out.println();
        }
        System.gc();
    }
/**
* retrieve the measurements for the current matrix/matrixLite
*/
public void computeSingleLogMeasures(String measureName) {
logger.info("Retrieving Log Measures...");
logMeasuresMatrix = new float[automata.size() + 1][1];
int constraintsNum = automata.size() + 1;
int tracesNum = log.wholeLength();
// for each constraint
for (int constraint = 0; constraint < constraintsNum; constraint++) {
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rConstraint: " + constraint + "/" + constraintsNum); // Status counter "current trace/total trace"
// for each measure
float[] currentTraceProbabilities = new float[9];
float ATgivenA = 0;
float AnotTgivenA = 0;
float notATgivenNotA = 0;
float notAnotTgivenNotA = 0;
for (int trace = 0; trace < tracesNum; trace++) {
// result { 0: activation, 1: target, 2: no activation, 3: no target}
// result {4: 00, 5: 01, , 6: 10, 7:11}
// result {8: trace length}
// A/n -A/n T/n -T/n AT/n A-T/n -AT/n -A-T/n N
// 0 2 1 3 7 6 5 4 8
if (eventsEvaluationMatrixLite == null) {
currentTraceProbabilities = Measures.getTraceProbabilities(eventsEvaluationMatrix[trace][constraint]);
} else {
currentTraceProbabilities = Measures.getTraceProbabilities(eventsEvaluationMatrixLite[trace][constraint]);
}
// AT|A A-T|A -AT|-A -A-T|-A
if (Float.isNaN(currentTraceProbabilities[7] / currentTraceProbabilities[0])) {
notATgivenNotA += currentTraceProbabilities[5] / currentTraceProbabilities[2];
notAnotTgivenNotA += currentTraceProbabilities[4] / currentTraceProbabilities[2];
} else {
ATgivenA += currentTraceProbabilities[7] / currentTraceProbabilities[0];
AnotTgivenA += currentTraceProbabilities[6] / currentTraceProbabilities[0];
}
}
ATgivenA /= tracesNum;
AnotTgivenA /= tracesNum;
notATgivenNotA /= tracesNum;
notAnotTgivenNotA /= tracesNum;
float A = ATgivenA + AnotTgivenA;
float notA = notATgivenNotA + notAnotTgivenNotA;
float T = ATgivenA + notATgivenNotA;
float notT = AnotTgivenA + notAnotTgivenNotA;
float n = tracesNum;
// float pA = p[0];
// float pT = p[1];
// float pnA = p[2];
// float pnT = p[3];
// float pnAnT = p[4];
// float pnAT = p[5];
// float pAnT = p[6];
// float pAT = p[7];
float[] currentLogProbabilities = {A, T, notA, notT, notAnotTgivenNotA, notATgivenNotA, AnotTgivenA, ATgivenA, n};
logMeasuresMatrix[constraint][0] = Measures.getLogMeasure(currentLogProbabilities, Measures.getMeasureIndex(measureName));
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rConstraint: " + constraintsNum + "/" + constraintsNum); // Status counter "current trace/total trace"
System.out.println();
}
System.gc();
}
/**
* retrieve the measurements for the current matrixLite
*
* @param nanTraceSubstituteFlag
* @param nanTraceSubstituteValue
*/
private void computeTraceMeasuresLite(boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue) {
// for the entire log
for (int trace = 0; trace < eventsEvaluationMatrixLite.length; trace++) {
// for each trace
for (int constraint = 0; constraint < eventsEvaluationMatrixLite[trace].length; constraint++) {
// for each constraint
for (int measure = 0; measure < Measures.MEASURE_NUM; measure++) {
traceMeasuresMatrix[trace][constraint][measure] = Measures.getTraceMeasure(eventsEvaluationMatrixLite[trace][constraint], measure, nanTraceSubstituteFlag, nanTraceSubstituteValue);
}
}
}
}
    /** @return distribution statistics of the trace measures, [constraint][measure]; null until computed */
    public SummaryStatistics[][] getTraceMeasuresDescriptiveStatistics() {
        return traceMeasuresDescriptiveStatistics;
    }
    /**
     * Get the name of the i-th measure.
     *
     * @param measureIndex index into Measures.MEASURE_NAMES
     * @return the display name of that measure
     */
    public String getMeasureName(int measureIndex) {
        return Measures.MEASURE_NAMES[measureIndex];
    }
    /**
     * Get the names of all the supported measures.
     *
     * @return the shared Measures.MEASURE_NAMES array (not a copy)
     */
    public String[] getMeasureNames() {
        return Measures.MEASURE_NAMES;
    }
}
| 33,375 | 45.876404 | 200 | java |
Janus | Janus-master/src/minerful/reactive/measurements/ReactiveMeasurementsOfflineQueryingCore.java | package minerful.reactive.measurements;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.ConstraintsBag;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.reactive.params.JanusMeasurementsCmdParameters;
import minerful.miner.stats.GlobalStatsTable;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.reactive.automaton.SeparatedAutomatonOfflineRunner;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.log4j.Logger;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
/**
* Class to manage and organize the run of automata over a Log/Trace
*/
public class ReactiveMeasurementsOfflineQueryingCore implements Callable<MegaMatrixMonster> {
protected static Logger logger;
private final LogParser logParser;
private final JanusMeasurementsCmdParameters janusCheckingParams;
private final JanusPrintParameters janusViewParams;
private final PostProcessingCmdParameters postPrarams;
private final TaskCharArchive taskCharArchive; // alphabet
private final GlobalStatsTable globalStatsTable;
private final ConstraintsBag bag; // rules to mine
private final int jobNum;
private MegaMatrixMonster megaMonster; // £d byte matrix with fine grain result
    {
        // lazy one-time binding of the shared (static) logger for this class
        if (logger == null) {
            logger = Logger.getLogger(ReactiveMeasurementsOfflineQueryingCore.class.getCanonicalName());
        }
    }
    /**
     * Constructor.
     *
     * @param jobNum              identifier of this (parallel) querying job
     * @param logParser           reader of the log to evaluate
     * @param janusCheckingParams measurement parameters
     * @param janusViewParams     console/printing options
     * @param postPrarams         post-processing parameters
     * @param taskCharArchive     alphabet of the log
     * @param globalStatsTable    global statistics of the log
     * @param bag                 constraints to measure
     */
    public ReactiveMeasurementsOfflineQueryingCore(int jobNum, LogParser logParser, JanusMeasurementsCmdParameters janusCheckingParams, JanusPrintParameters janusViewParams,
                                                   PostProcessingCmdParameters postPrarams, TaskCharArchive taskCharArchive,
                                                   GlobalStatsTable globalStatsTable, ConstraintsBag bag) {
        this.jobNum = jobNum;
        this.logParser = logParser;
        this.janusCheckingParams = janusCheckingParams;
        this.janusViewParams = janusViewParams;
        this.postPrarams = postPrarams;
        this.taskCharArchive = taskCharArchive;
        this.globalStatsTable = globalStatsTable;
        this.bag = bag;
    }
/**
* Run a set of separatedAutomata over a single trace
*
* @param logTraceParser reader for a trace
* @param automata set of separatedAutomata to test over the trace
* @return boolean matrix with the evaluation in each single event of all the constraints
*/
public static void runTrace(LogTraceParser logTraceParser, List<SeparatedAutomatonOfflineRunner> automata, byte[][] results) {
// reset automata for a clean run
for (SeparatedAutomatonOfflineRunner automatonOfflineRunner : automata) {
automatonOfflineRunner.reset();
}
// retrieve the entire trace
logTraceParser.init();
char[] trace = logTraceParser.encodeTrace().toCharArray();
// evaluate the trace with each constraint (i.e. separated automaton)
int i = 0;
for (SeparatedAutomatonOfflineRunner automatonOfflineRunner : automata) {
results[i] = new byte[trace.length];
automatonOfflineRunner.runTrace(trace, trace.length, results[i++]);
}
}
/**
* Run a set of separatedAutomata over a single trace
*
* @param logTraceParser reader for a trace
* @param automata set of separatedAutomata to test over the trace
* @return boolean matrix with the evaluation in each single event of all the constraints
*/
public static void runTraceLite(LogTraceParser logTraceParser, List<SeparatedAutomatonOfflineRunner> automata, int[][] results) {
// reset automata for a clean run
for (SeparatedAutomatonOfflineRunner automatonOfflineRunner : automata) {
automatonOfflineRunner.reset();
}
// retrieve the entire trace
logTraceParser.init();
char[] trace = logTraceParser.encodeTrace().toCharArray();
// evaluate the trace with each constraint (i.e. separated automaton)
int i = 0;
byte[] temp;
for (SeparatedAutomatonOfflineRunner automatonOfflineRunner : automata) {
temp = new byte[trace.length];
automatonOfflineRunner.runTrace(trace, trace.length, temp);
results[i] = getTraceCounters(temp);
i++;
}
}
private static int[] getTraceCounters(byte[] reactiveConstraintEvaluation) {
int[] result = new int[9];
if (reactiveConstraintEvaluation.length == 0) return result;
// result { 0: activation, 1: target, 2: no activation, 3: no target}
// result {4: 00, 5: 01, , 6: 10, 7:11}
for (byte eval : reactiveConstraintEvaluation) {
result[0] += eval / 2; // the activator is true if the byte is >1, i.e. 2 or 3
result[1] += eval % 2; // the target is true if the byte is odd, i,e, 1 or 3
result[eval + 4]++;
}
int l = reactiveConstraintEvaluation.length;
result[2] = l - result[0];
result[3] = l - result[1];
result[8] = l;
return result;
}
/**
* Run a set of separatedAutomata over a full LogJanusModelCheckLauncher
* <p>
* About variable finalResult (byte[][][]) bytes meaning:
* Each byte stores the results of both Activator and target of a given constraint in a specific trace.
* The left bit is for the activator, the right bit for the target,i.e.,[activator-bit][target-bit]
* In details:
* 0 -> 00 -> Activator: False, Target: False
* 1 -> 01 -> Activator: False, Target: true
* 2 -> 10 -> Activator: True, Target: False
* 3 -> 11 -> Activator: True, Target: True
*
* @param logParser log reader
* @param automata set of separatedAutomata to test over the log
* @return ordered Array of supports for the full log for each automaton
*/
public void runLog(LogParser logParser, List<SeparatedAutomatonOfflineRunner> automata) {
byte[][][] finalResults = new byte[logParser.length()][automata.size() + 1][]; // TODO case length=0
logger.info("Basic result matrix created! Size: [" + logParser.length() + "][" + (automata.size() + 1) + "][*]");
int currentTraceNumber = 0;
int numberOfTotalTraces = logParser.length();
for (Iterator<LogTraceParser> it = logParser.traceIterator(); it.hasNext(); ) {
LogTraceParser tr = it.next();
runTrace(tr, automata, finalResults[currentTraceNumber]);
// MODEL TRACE EVALUATION
computeModelTraceEvaluation(finalResults[currentTraceNumber]);
currentTraceNumber++;
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces); // Status counter "current trace/total trace"
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces);
System.out.println();
}
this.megaMonster = new MegaMatrixMonster(finalResults, this.logParser, this.bag.getSeparatedAutomataOfflineRunners(), janusViewParams);
}
/**
* Compute the P2P evaluation of the trace joining the results of each separated automaton.
* <p>
* The rationale is: select only the activated automata and check if no activated target is violated
* <p>
* Evaluation, ie each point i
* if 10 in at least one i -> 10 : 2
* elif 11 in at least one i -> 11 : 3
* elif 00 in at least one i -> 00 : 0
* else -> 01 : 1
*
* @param finalResult
*/
private void computeModelTraceEvaluation(byte[][] finalResult) {
int traceLen = finalResult[0].length;
int modelIndex = finalResult.length - 1;
finalResult[modelIndex] = new byte[traceLen];
for (int i = 0; i < traceLen; i++) {
finalResult[modelIndex][i] = 1;
for (int c = 0; c < modelIndex; c++) {
if (finalResult[c][i] == 2) {
finalResult[modelIndex][i] = 2;
break;
}
// finalResult[modelIndex][i] |= finalResult[c][i]; // former version where, if not activated, at least one satisfied target results in 01
if (finalResult[c][i] == 3) finalResult[modelIndex][i] = 3;
if (finalResult[modelIndex][i] != 3 && finalResult[c][i] == 0) finalResult[modelIndex][i] = 0;
}
}
}
    /**
     * Lite counterpart of {@code computeModelTraceEvaluation}: meant to fill the
     * last row of {@code finalResult} with the counter vector of the whole-model
     * evaluation of one trace.
     * <p>
     * NOTE(review): this implementation looks inconsistent with its inputs and
     * should be double-checked against the non-lite version:
     * (1) in the lite pipeline each {@code finalResult[c]} holds the 9 aggregated
     * counters of constraint c (see {@code runTraceLite}), not per-event values,
     * so {@code finalResult[0].length} is the counter-vector size rather than the
     * trace length, and the comparisons against 0/2/3 read counters, not events;
     * (2) {@code tempResults} is allocated but never written, so the final
     * {@code getTraceCounters(tempResults)} always yields all-zero-based counters
     * (presumably the loop was meant to write into {@code tempResults[i]}).
     *
     * @param finalResult per-constraint counter vectors of one trace; the last
     *                    row is overwritten by this method
     */
    private void computeModelTraceEvaluationLite(int[][] finalResult) {
        int traceLen = finalResult[0].length; // NOTE(review): this is the counter-vector size, not the trace length
        int modelIndex = finalResult.length - 1;
        byte[] tempResults = new byte[traceLen]; // NOTE(review): never populated below
        for (int i = 0; i < traceLen; i++) {
            for (int c = 0; c < modelIndex; c++) {
                if (finalResult[c][i] == 2) {
                    finalResult[modelIndex][i] = 2;
                    break;
                }
//                finalResult[modelIndex][i] |= finalResult[c][i]; // former version where, if not activated, at least one satisfied target results in 01
                if (finalResult[c][i] == 3) finalResult[modelIndex][i] = 3;
                if (finalResult[modelIndex][i] != 3 && finalResult[c][i] == 0) finalResult[modelIndex][i] = 0;
            }
        }
        finalResult[modelIndex] = getTraceCounters(tempResults);
    }
/**
* Run a set of separatedAutomata over a full LogJanusModelCheckLauncher
* <p>
* About variable matrixLite (int[][][]) meaning:
* * compact version of the byte[][][] where instead of saving the result for each event, we keep only what is required for the traces measures computation.
* * Each int stores the counter of the results of a combination of Activator and target of a given constraint in a specific trace.
* * In details:
* * COUNTER INDEX -> Explanation
* * 0 -> Number of Activator: True [#]
* * 1 -> Number of Target: True [#]
* * 2 -> Number of Activator: False
* * 3 -> Number of Target: False
* * 4 -> Number of Activator: False, Target: False
* * 5 -> Number of Activator: False, Target: true
* * 6 -> Number of Activator: True, Target: False
* * 7 -> Number of Activator: True, Target: True [#]
* * 8 -> Trace lenght [#]
* * <p>
* * Note. Supposedly only 4 value (marked with #) are enough to derive all the others, but lets try to keep all 9 for now
*
* @param logParser log reader
* @param automata set of separatedAutomata to test over the log
* @return ordered Array of supports for the full log for each automaton
*/
public void runLogLite(LogParser logParser, List<SeparatedAutomatonOfflineRunner> automata) {
int[][][] finalResults = new int[logParser.length()][automata.size() + 1][9]; // TODO case length=0
logger.info("Basic result matrix-LITE created! Size: [" + logParser.length() + "][" + (automata.size() + 1) + "][9]");
int currentTraceNumber = 0;
int numberOfTotalTraces = logParser.length();
for (Iterator<LogTraceParser> it = logParser.traceIterator(); it.hasNext(); ) {
LogTraceParser tr = it.next();
runTraceLite(tr, automata, finalResults[currentTraceNumber]);
// MODEL TRACE EVALUATION
computeModelTraceEvaluationLite(finalResults[currentTraceNumber]);
currentTraceNumber++;
if (!janusViewParams.suppressMeasuresStatusPrint)
System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces); // Status counter "current trace/total trace"
}
if (!janusViewParams.suppressMeasuresStatusPrint) {
System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces);
System.out.println();
}
this.megaMonster = new MegaMatrixMonster(finalResults, this.logParser, this.bag.getSeparatedAutomataOfflineRunners(), janusViewParams);
}
/**
* Launcher for model checking
*
* @return
*/
public MegaMatrixMonster check() {
if (janusCheckingParams.liteFlag) {
runLogLite(this.logParser, this.bag.getSeparatedAutomataOfflineRunners());
} else {
runLog(this.logParser, this.bag.getSeparatedAutomataOfflineRunners());
}
return this.megaMonster;
}
    /**
     * {@link java.util.concurrent.Callable} adapter: delegates to {@link #check()}.
     *
     * @return the measurement container filled by the run
     * @throws Exception declared by the Callable contract; not thrown directly here
     */
    @Override
    public MegaMatrixMonster call() throws Exception {
        return this.check();
    }
}
| 12,966 | 42.367893 | 173 | java |
Janus | Janus-master/src/minerful/reactive/miner/ReactiveMinerOfflineQueryingCore.java | package minerful.reactive.miner;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.ConstraintsBag;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.miner.stats.GlobalStatsTable;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.reactive.automaton.SeparatedAutomatonOfflineRunner;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.log4j.Logger;
import java.time.Duration;
import java.time.Instant;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Runs the separated automata of a constraints bag over an event log in
 * "offline" mode (each trace is fully read and encoded before being evaluated)
 * and writes the resulting support/confidence/interest-factor measures back
 * into the bag.
 */
public class ReactiveMinerOfflineQueryingCore implements Callable<ConstraintsBag> {
    protected static Logger logger;
    private final LogParser logParser;
    private final MinerFulCmdParameters minerFulParams;
    private final JanusPrintParameters janusViewParams;
    private final PostProcessingCmdParameters postPrarams;
    private final TaskCharArchive taskCharArchive; // alphabet
    private final GlobalStatsTable globalStatsTable;
    private final ConstraintsBag bag; // rules to mine
    private final int jobNum;

    {
        if (logger == null) {
            logger = Logger.getLogger(ReactiveMinerOfflineQueryingCore.class.getCanonicalName());
        }
    }

    /**
     * Constructor.
     *
     * @param jobNum           job identifier
     * @param logParser        log reader
     * @param minerFulParams   general mining parameters
     * @param postPrarams      post-processing parameters
     * @param taskCharArchive  alphabet of the log
     * @param globalStatsTable global statistics of the log
     * @param bag              constraints to be measured
     * @param janusViewParams  output/printing options
     */
    public ReactiveMinerOfflineQueryingCore(int jobNum, LogParser logParser, MinerFulCmdParameters minerFulParams,
                                            PostProcessingCmdParameters postPrarams, TaskCharArchive taskCharArchive,
                                            GlobalStatsTable globalStatsTable, ConstraintsBag bag, JanusPrintParameters janusViewParams) {
        this.jobNum = jobNum;
        this.logParser = logParser;
        this.minerFulParams = minerFulParams;
        this.postPrarams = postPrarams;
        this.taskCharArchive = taskCharArchive;
        this.globalStatsTable = globalStatsTable;
        this.bag = bag;
        this.janusViewParams = janusViewParams;
    }

    /**
     * Runs a set of separated automata over a single trace.
     * <p>
     * Each per-event evaluation byte packs the activator truth (left bit) and
     * the target truth (right bit): 0=00, 1=01, 2=10 (activation, target
     * violated), 3=11 (activation, target satisfied).
     *
     * @param logTraceParser reader for a trace
     * @param automata       set of separated automata to test over the trace
     * @return for each automaton: {fulfilled activations, total activations}
     */
    public static int[][] runTrace(LogTraceParser logTraceParser, List<SeparatedAutomatonOfflineRunner> automata) {
        int[][] results = new int[automata.size()][2]; // [0] fulfilled activations number, [1] activations number
        // reset automata for a clean run
        for (SeparatedAutomatonOfflineRunner automatonOfflineRunner : automata) {
            automatonOfflineRunner.reset();
        }
        // retrieve the entire trace
        logTraceParser.init();
        char[] trace = logTraceParser.encodeTrace().toCharArray();
        // evaluate the trace with each constraint (i.e. separated automaton)
        int i = 0;
        byte[] currentAutomatonResults;
        for (SeparatedAutomatonOfflineRunner automatonOfflineRunner : automata) {
            currentAutomatonResults = new byte[trace.length];
            automatonOfflineRunner.runTrace(trace, trace.length, currentAutomatonResults);
            // count activations and fulfilled activations.
            // BUG FIX: the original switch fell through from case 2 into case 3,
            // so an unfulfilled activation (10) was counted as two activations
            // and, wrongly, as a fulfilment.
            int fullfilments = 0;
            int activations = 0;
            for (byte eventEvaluation : currentAutomatonResults) {
                switch (eventEvaluation) {
                    case 2: // activated, target violated
                        activations++;
                        break;
                    case 3: // activated, target satisfied
                        activations++;
                        fullfilments++;
                        break;
                    default: // not activated: irrelevant for the truth degree
                        break;
                }
            }
            results[i][0] = fullfilments;
            results[i][1] = activations;
            i++;
        }
        return results;
    }

    /**
     * Runs a set of separated automata over a full log and stores, for every
     * constraint in the bag, its support (mean per-trace truth degree over all
     * traces), confidence (mean truth degree over activated traces) and
     * interest factor (currently equal to the confidence).
     *
     * @param logParser log reader
     * @param automata  set of separated automata to test over the log
     */
    public void runLog(LogParser logParser, List<SeparatedAutomatonOfflineRunner> automata) {
        double[] finalResults = new double[automata.size()]; // TODO case length=0
        int currentTraceNumber = 0;
        int[] activeTraces = new int[automata.size()]; // per constraint: traces with at least one activation
        int numberOfTotalTraces = logParser.length();

        if (!janusViewParams.suppressDiscoveryStatusPrint)
            System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces);
        for (Iterator<LogTraceParser> it = logParser.traceIterator(); it.hasNext(); ) {
            LogTraceParser tr = it.next();
            int[][] partialResult = runTrace(tr, automata);
            currentTraceNumber++;
            if (!janusViewParams.suppressDiscoveryStatusPrint)
                System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces); // Status counter "current trace/total trace"
            for (int i = 0; i < finalResults.length; i++) {
                if (partialResult[i][1] > 0) {
                    // BUG FIX: cast to double — the original integer division
                    // truncated every per-trace truth degree to 0 or 1.
                    finalResults[i] += (double) partialResult[i][0] / partialResult[i][1];
                    activeTraces[i]++;
                }
            }
        }
        if (!janusViewParams.suppressDiscoveryStatusPrint) {
            System.out.print("\rTraces: " + currentTraceNumber + "/" + numberOfTotalTraces);
            System.out.println();
        }
        // Support and confidence of each constraint with respect to the log
        for (int i = 0; i < automata.size(); i++) {
            double support = finalResults[i] / currentTraceNumber;
            // NOTE(review): confidence is NaN when a constraint is never activated — confirm intended
            double confidence = finalResults[i] / activeTraces[i];
            this.bag.getConstraintOfOfflineRunner(automata.get(i)).setSupport(support);
            this.bag.getConstraintOfOfflineRunner(automata.get(i)).setConfidence(confidence);
            this.bag.getConstraintOfOfflineRunner(automata.get(i)).setInterestFactor(confidence);
        }
    }

    /**
     * Launcher for mining: measures all constraints in the bag over the log.
     *
     * @return the bag with updated measures
     */
    public ConstraintsBag discover() {
        runLog(this.logParser, this.bag.getSeparatedAutomataOfflineRunners());
        return this.bag;
    }

    /**
     * {@link Callable} adapter: delegates to {@link #discover()}.
     */
    @Override
    public ConstraintsBag call() throws Exception {
        return this.discover();
    }
}
| 7,469 | 38.734043 | 143 | java |
Janus | Janus-master/src/minerful/reactive/miner/ReactiveMinerPruningCore.java | package minerful.reactive.miner;
import minerful.concept.ProcessModel;
import minerful.concept.constraint.Constraint;
import minerful.miner.core.MinerFulQueryingCore;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import org.apache.log4j.Logger;
/**
 * Cleans and prunes a process model mined through the reactive miner
 * (separation technique), dropping constraints below the configured
 * support/confidence thresholds.
 */
public class ReactiveMinerPruningCore {
    protected static Logger logger;
    protected ProcessModel processModel;
    protected MinerFulCmdParameters minerFulParams;
    protected PostProcessingCmdParameters postParams;

    {
        if (logger == null) {
            logger = Logger.getLogger(MinerFulQueryingCore.class.getCanonicalName());
        }
    }

    /**
     * Constructor.
     *
     * @param processModel   model whose constraints bag is to be pruned
     * @param minerFulParams general mining parameters
     * @param postParams     thresholds and pruning switches
     */
    public ReactiveMinerPruningCore(ProcessModel processModel, MinerFulCmdParameters minerFulParams, PostProcessingCmdParameters postParams) {
        this.processModel = processModel;
        this.minerFulParams = minerFulParams;
        this.postParams = postParams;
    }

    /**
     * Removes from the model every constraint whose confidence or support lies
     * below the configured thresholds. Does nothing (beyond logging) when the
     * pruning switch is off.
     */
    public void pruneNonActiveConstraints() {
        logger.info("Pruning non active constraints...");
        if (!this.postParams.cropRedundantAndInconsistentConstraints) {
            return; // pruning disabled by configuration
        }
        for (Constraint candidate : this.processModel.bag.getAllConstraints()) {
            boolean aboveThresholds = candidate.getConfidence() >= postParams.confidenceThreshold
                    && candidate.getSupport() >= postParams.supportThreshold;
            if (!aboveThresholds) {
                this.processModel.bag.remove(candidate);
            }
        }
    }
}
| 1,496 | 30.851064 | 139 | java |
Janus | Janus-master/src/minerful/reactive/miner/ReactiveMinerQueryingCore.java | package minerful.reactive.miner;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.ConstraintsBag;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.miner.stats.GlobalStatsTable;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.reactive.automaton.SeparatedAutomatonRunner;
import org.apache.log4j.Logger;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Runs the separated automata of a constraints bag over an event log in
 * streaming fashion (event by event) and writes the resulting
 * support/confidence/interest-factor measures back into the bag.
 */
public class ReactiveMinerQueryingCore implements Callable<ConstraintsBag> {
    protected static Logger logger;
    private final LogParser logParser;
    private final MinerFulCmdParameters minerFulParams;
    private final PostProcessingCmdParameters postPrarams;
    private final TaskCharArchive taskCharArchive; // alphabet
    private final GlobalStatsTable globalStatsTable;
    private final ConstraintsBag bag; // rules to mine
    private final int jobNum;

    {
        if (logger == null) {
            logger = Logger.getLogger(ReactiveMinerQueryingCore.class.getCanonicalName());
        }
    }

    /**
     * Constructor.
     *
     * @param jobNum           job identifier
     * @param logParser        log reader
     * @param minerFulParams   general mining parameters
     * @param postPrarams      post-processing parameters
     * @param taskCharArchive  alphabet of the log
     * @param globalStatsTable global statistics of the log
     * @param bag              constraints to be measured
     */
    public ReactiveMinerQueryingCore(int jobNum, LogParser logParser, MinerFulCmdParameters minerFulParams,
                                     PostProcessingCmdParameters postPrarams, TaskCharArchive taskCharArchive,
                                     GlobalStatsTable globalStatsTable, ConstraintsBag bag) {
        this.jobNum = jobNum;
        this.logParser = logParser;
        this.minerFulParams = minerFulParams;
        this.postPrarams = postPrarams;
        this.taskCharArchive = taskCharArchive;
        this.globalStatsTable = globalStatsTable;
        this.bag = bag;
    }

    /**
     * Runs a set of separated automata over a single trace, feeding the encoded
     * events one at a time, and collects each automaton's degree of truth.
     *
     * @param logTraceParser reader for a trace
     * @param automata       set of separated automata to test over the trace
     * @return ordered array with the degree of truth of each automaton on the trace
     */
    public static double[] runTrace(LogTraceParser logTraceParser, List<SeparatedAutomatonRunner> automata) {
        // fresh start for every automaton
        for (SeparatedAutomatonRunner runner : automata) {
            runner.reset();
        }
        // feed the trace to all automata, one event at a time
        logTraceParser.init();
        while (!logTraceParser.isParsingOver()) {
            char event = logTraceParser.parseSubsequentAndEncode();
            for (SeparatedAutomatonRunner runner : automata) {
                runner.step(event);
            }
        }
        // collect the outcome of every automaton
        double[] degreesOfTruth = new double[automata.size()];
        for (int idx = 0; idx < degreesOfTruth.length; idx++) {
            degreesOfTruth[idx] = automata.get(idx).getDegreeOfTruth();
        }
        return degreesOfTruth;
    }

    /**
     * Runs a set of separated automata over a full log and stores, for every
     * constraint in the bag, its support (mean degree of truth over all traces),
     * confidence (mean degree of truth over activated traces) and interest
     * factor (currently equal to the confidence).
     *
     * @param logParser log reader
     * @param automata  set of separated automata to test over the log
     */
    public void runLog(LogParser logParser, List<SeparatedAutomatonRunner> automata) {
        double[] truthDegreeSums = new double[automata.size()]; // TODO case length=0
        int[] activeTraces = new int[automata.size()]; // per constraint: traces with at least one activation
        int parsedTraces = 0;
        int totalTraces = logParser.length();
        for (Iterator<LogTraceParser> traceIt = logParser.traceIterator(); traceIt.hasNext(); ) {
            double[] traceDegrees = runTrace(traceIt.next(), automata);
            parsedTraces++;
            System.out.print("\rTraces: " + parsedTraces + "/" + totalTraces); // Status counter "current trace/total trace"
            for (int idx = 0; idx < truthDegreeSums.length; idx++) {
                truthDegreeSums[idx] += traceDegrees[idx];
                if (automata.get(idx).isActivated()) {
                    activeTraces[idx]++;
                }
            }
        }
        System.out.println();
        // Support and confidence of each constraint with respect to the log
        for (int idx = 0; idx < truthDegreeSums.length; idx++) {
            double support = truthDegreeSums[idx] / parsedTraces;
            double confidence = truthDegreeSums[idx] / activeTraces[idx];
            this.bag.getConstraintOfRunner(automata.get(idx)).setSupport(support);
            this.bag.getConstraintOfRunner(automata.get(idx)).setConfidence(confidence);
            this.bag.getConstraintOfRunner(automata.get(idx)).setInterestFactor(confidence);
        }
    }

    /**
     * Launcher for mining: measures all constraints in the bag over the log.
     *
     * @return the bag with updated measures
     */
    public ConstraintsBag discover() {
        runLog(this.logParser, this.bag.getSeparatedAutomataRunners());
        return this.bag;
    }

    /**
     * {@link Callable} adapter: delegates to {@link #discover()}.
     */
    @Override
    public ConstraintsBag call() throws Exception {
        return this.discover();
    }
}
| 4,703 | 31.441379 | 130 | java |
Janus | Janus-master/src/minerful/reactive/params/JanusDFGVariantCmdParameters.java | package minerful.reactive.params;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import java.io.File;
/**
 * Command-line parameters for the Janus DFG-based variant analysis: two log
 * variants are compared, with permutation-based statistical testing of the
 * differences and optional p-value adjustment for the multiple-test problem.
 */
public class JanusDFGVariantCmdParameters extends ParamsManager {

    // DFG Variant specific
    public static final String INPUT_LOGFILE_1_PATH_PARAM_NAME = "iLF1"; // first log variant to analyse
    public static final String INPUT_LOGFILE_2_PATH_PARAM_NAME = "iLF2"; // second log variant to analyse
    public static final String P_VALUE_NAME = "pValue"; // p-value threshold for statistical relevance of the results. default: 0.01
    public static final Double DEFAULT_P_VALUE = 0.01;
    public static final String DIFFERENCE_THRESHOLD_NAME = "differenceThreshold"; // threshold for the time(sec.) difference between the variants to be considered relevant. default= 0.0
    public static final Double DEFAULT_DIFFERENCE_THRESHOLD = 0.0;
    //    public static final String BEST_N_RESULTS_NAME = "bestNresults"; // number of rules in the TOP result list. default= 10
    //    public static final Integer DEFAULT_BEST_N_RESULTS_VALUE = 10; // number of rules in the TOP result list. default= 10
    public static final String N_PERMUTATIONS_PARAM_NAME = "permutations";
    public static final Integer DEFAULT_N_PERMUTATIONS = 1000; // default number of permutations
    public static final String OUTPUT_FILE_CSV_PARAM_NAME = "oCSV";
    public static final String OUTPUT_FILE_JSON_PARAM_NAME = "oJSON";
    public static final String OUTPUT_KEEP_FLAG_NAME = "oKeep";
    public static final String P_VALUE_ADJUSTMENT_METHOD_PARAM_NAME = "pValueAdjustment";
    public static final JanusDFGVariantCmdParameters.PValueAdjustmentMethod DEFAULT_P_VALUE_ADJUSTMENT_METHOD = JanusDFGVariantCmdParameters.PValueAdjustmentMethod.hb;

    // Log managing from MINERful
    public static final EventClassification DEFAULT_EVENT_CLASSIFICATION = EventClassification.name;
    public static final LogInputEncoding DEFAULT_INPUT_LOG_ENCODING = LogInputEncoding.xes;
    public static final String INPUT_LOG_1_ENCODING_PARAM_NAME = "iLE1"; // encoding of the first log variant
    public static final String INPUT_LOG_2_ENCODING_PARAM_NAME = "iLE2"; // encoding of the second log variant
    public static final String EVENT_CLASSIFICATION_PARAM_NAME = "iLClassif";
    public static final String ENCODE_OUTPUT_TASKS_FLAG = "encodeTasksFlag";

    public enum LogInputEncoding {
        /**
         * For XES logs (also compressed)
         */
        xes,
        /**
         * For MXML logs (also compressed)
         */
        mxml,
        /**
         * For string-encoded traces, where each character is assumed to be a task symbol
         */
        strings;
    }

    public enum EventClassification {
        name, logspec
    }

    public enum PValueAdjustmentMethod {
        /**
         * Do not apply correction
         */
        none,
        /**
         * Holm-Bonferroni
         */
        hb,
        /**
         * Benjamini-Hochberg
         */
        bh
    }

    /**
     * file of the first log variant to analyse
     */
    public File inputLogFile1;
    /**
     * file of the second log variant to analyse
     */
    public File inputLogFile2;
    /**
     * Encoding language for the first input event log
     */
    public LogInputEncoding inputLogLanguage1;
    /**
     * Encoding language for the second input event log
     */
    public LogInputEncoding inputLogLanguage2;
    /**
     * Classification policy to relate events to event classes, that is the task names
     */
    public EventClassification eventClassification;
    /**
     * p-value threshold for statistical relevance of the results. default: 0.01
     */
    public double pValue;
    /**
     * threshold for the difference of the variants constraints measurement to be considered relevant, default: "0.0"
     */
    public double differenceThreshold;
    /**
     * number of permutations to perform, default: 1000
     */
    public int nPermutations;
    /**
     * output file in CSV format
     */
    public File outputCvsFile;
    /**
     * output file in JSON format
     */
    public File outputJsonFile;
    /**
     * keep the irrelevant results in output
     */
    public boolean oKeep;
    /**
     * Flag if the output tasks/events should be encoded (e.g., A B C D E...) or not (original names as in log)
     **/
    public boolean encodeOutputTasks;
    /**
     * method to adjust the pValues to address the Multiple test problem. Default=hb
     **/
    public JanusDFGVariantCmdParameters.PValueAdjustmentMethod pValueAdjustmentMethod;

    /**
     * Default constructor: all parameters are set to their documented defaults.
     */
    public JanusDFGVariantCmdParameters() {
        super();
        this.inputLogLanguage1 = DEFAULT_INPUT_LOG_ENCODING;
        this.inputLogLanguage2 = DEFAULT_INPUT_LOG_ENCODING;
        this.eventClassification = DEFAULT_EVENT_CLASSIFICATION;
        this.inputLogFile1 = null;
        this.inputLogFile2 = null;
        this.pValue = DEFAULT_P_VALUE;
        this.nPermutations = DEFAULT_N_PERMUTATIONS;
        this.outputCvsFile = null;
        this.outputJsonFile = null;
        this.oKeep = false;
        this.encodeOutputTasks = false;
        this.differenceThreshold = DEFAULT_DIFFERENCE_THRESHOLD;
        this.pValueAdjustmentMethod = DEFAULT_P_VALUE_ADJUSTMENT_METHOD;
    }

    /**
     * Builds the parameters from command-line arguments, extending the given options.
     */
    public JanusDFGVariantCmdParameters(Options options, String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(options, args);
    }

    /**
     * Builds the parameters from command-line arguments only.
     */
    public JanusDFGVariantCmdParameters(String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(new Options(), args);
    }

    /**
     * Reads the parsed command line into the parameter fields, falling back to
     * the current (default) values when an option is absent.
     */
    @Override
    protected void setup(CommandLine line) {
        this.inputLogFile1 = openInputFile(line, INPUT_LOGFILE_1_PATH_PARAM_NAME);
        this.inputLogFile2 = openInputFile(line, INPUT_LOGFILE_2_PATH_PARAM_NAME);
        this.inputLogLanguage1 = LogInputEncoding.valueOf(
                line.getOptionValue(
                        INPUT_LOG_1_ENCODING_PARAM_NAME,
                        this.inputLogLanguage1.toString()
                )
        );
        this.inputLogLanguage2 = LogInputEncoding.valueOf(
                line.getOptionValue(
                        INPUT_LOG_2_ENCODING_PARAM_NAME,
                        this.inputLogLanguage2.toString()
                )
        );
        this.eventClassification = EventClassification.valueOf(
                line.getOptionValue(
                        EVENT_CLASSIFICATION_PARAM_NAME,
                        this.eventClassification.toString()
                )
        );
        this.pValue = Double.parseDouble(
                line.getOptionValue(
                        P_VALUE_NAME,
                        Double.toString(this.pValue)
                )
        );
        this.differenceThreshold = Double.parseDouble(
                line.getOptionValue(
                        DIFFERENCE_THRESHOLD_NAME,
                        Double.toString(this.differenceThreshold)
                )
        );
        this.nPermutations = Integer.parseInt(
                line.getOptionValue(
                        N_PERMUTATIONS_PARAM_NAME,
                        Integer.toString(this.nPermutations)
                )
        );
        this.outputCvsFile = openOutputFile(line, OUTPUT_FILE_CSV_PARAM_NAME);
        this.outputJsonFile = openOutputFile(line, OUTPUT_FILE_JSON_PARAM_NAME);
        this.oKeep = line.hasOption(OUTPUT_KEEP_FLAG_NAME);
        // BUG FIX: encodeOutputTasks was previously read from OUTPUT_KEEP_FLAG_NAME,
        // so the -encodeTasksFlag option had no effect and -oKeep silently enabled
        // the task encoding as well. (The duplicated re-opening of the two input
        // log files that followed was removed, too.)
        this.encodeOutputTasks = line.hasOption(ENCODE_OUTPUT_TASKS_FLAG);
        this.pValueAdjustmentMethod = JanusDFGVariantCmdParameters.PValueAdjustmentMethod.valueOf(
                line.getOptionValue(
                        P_VALUE_ADJUSTMENT_METHOD_PARAM_NAME,
                        this.pValueAdjustmentMethod.toString()
                )
        );
    }

    @Override
    public Options addParseableOptions(Options options) {
        Options myOptions = listParseableOptions();
        for (Object myOpt : myOptions.getOptions())
            options.addOption((Option) myOpt);
        return options;
    }

    @Override
    public Options listParseableOptions() {
        return parseableOptions();
    }

    /**
     * Declares all command-line options accepted by this parameter manager.
     *
     * @return the commons-cli options of the DFG variant analysis
     */
    @SuppressWarnings("static-access")
    public static Options parseableOptions() {
        Options options = new Options();
        options.addOption(
                Option.builder(INPUT_LOGFILE_1_PATH_PARAM_NAME)
                        .hasArg().argName("path")
                        // .isRequired(true) // Causing more problems than not
                        .longOpt("in-log-1-file")
                        .desc("path to read the log file from")
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(INPUT_LOGFILE_2_PATH_PARAM_NAME)
                        .hasArg().argName("path")
                        // .isRequired(true) // Causing more problems than not
                        .longOpt("in-log-2-file")
                        .desc("path to read the log file from")
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(INPUT_LOG_1_ENCODING_PARAM_NAME)
                        .hasArg().argName("language")
                        .longOpt("in-log-1-encoding")
                        .desc("input encoding language " + printValues(LogInputEncoding.values())
                                + printDefault(fromEnumValueToString(DEFAULT_INPUT_LOG_ENCODING)))
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(INPUT_LOG_2_ENCODING_PARAM_NAME)
                        .hasArg().argName("language")
                        .longOpt("in-log-2-encoding")
                        .desc("input encoding language " + printValues(LogInputEncoding.values())
                                + printDefault(fromEnumValueToString(DEFAULT_INPUT_LOG_ENCODING)))
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(EVENT_CLASSIFICATION_PARAM_NAME)
                        .hasArg().argName("class")
                        .longOpt("in-log-evt-classifier")
                        .desc("event classification (resp., by activity name, or according to the log-specified pattern) " + printValues(EventClassification.values())
                                + printDefault(fromEnumValueToString(DEFAULT_EVENT_CLASSIFICATION)))
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(DIFFERENCE_THRESHOLD_NAME)
                        .hasArg().argName("number")
                        .longOpt("difference-threshold")
                        .desc("threshold for the difference of the variants constraints measurement to be considered relevant, default: 0.00")
                        .type(Double.class)
                        .build()
        );
        options.addOption(
                Option.builder(P_VALUE_NAME)
                        .hasArg().argName("number")
                        .longOpt("p-value")
                        .desc("p-value threshold for statistical relevance of the results. default: 0.01")
                        .type(Double.class)
                        .build()
        );
        options.addOption(
                Option.builder(N_PERMUTATIONS_PARAM_NAME)
                        .hasArg().argName("number")
                        .longOpt("number-of-permutations")
                        .desc("number of permutations to perform during the statistical test. default: 1000")
                        .type(Double.class)
                        .build()
        );
        options.addOption(
                Option.builder(OUTPUT_FILE_CSV_PARAM_NAME)
                        .hasArg().argName("path")
                        // .isRequired(true) // Causing more problems than not
                        .longOpt("out-csv-file")
                        .desc("path to output CSV file")
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(OUTPUT_FILE_JSON_PARAM_NAME)
                        .hasArg().argName("path")
                        // .isRequired(true) // Causing more problems than not
                        .longOpt("out-json-file")
                        .desc("path to output JSON file")
                        .type(String.class)
                        .build()
        );
        options.addOption(
                Option.builder(OUTPUT_KEEP_FLAG_NAME)
                        // .isRequired(true) // Causing more problems than not
                        .longOpt("output-keep")
                        .desc("keep irrelevant results in output")
                        .type(Boolean.class)
                        .build()
        );
        options.addOption(
                Option.builder(ENCODE_OUTPUT_TASKS_FLAG)
                        // .isRequired(true) // Causing more problems than not
                        .longOpt("flag-encoding-tasks")
                        .desc("Flag if the output tasks/events should be encoded")
                        .type(Boolean.class)
                        .build()
        );
        options.addOption(
                Option.builder(P_VALUE_ADJUSTMENT_METHOD_PARAM_NAME)
                        .hasArg().argName("language")
                        .longOpt("p-value-adjustment-method")
                        .desc("pValue adjustment methods: Holm-Bonferroni, Benjamini-Hochberg " + printValues(JanusDFGVariantCmdParameters.PValueAdjustmentMethod.values())
                                + printDefault(fromEnumValueToString(DEFAULT_P_VALUE_ADJUSTMENT_METHOD)))
                        .type(String.class)
                        .build()
        );
        return options;
    }
}
| 14,436 | 41.090379 | 186 | java |
Janus | Janus-master/src/minerful/reactive/params/JanusMeasurementsCmdParameters.java | package minerful.reactive.params;
import minerful.params.ParamsManager;
import minerful.reactive.measurements.Measures;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class JanusMeasurementsCmdParameters extends ParamsManager {
    // CLI option names and default values for the measurement configuration
    public static final String NaN_TRACE_SUBSTITUTE_FLAG_PARAM_NAME = "nanTraceSubstitute"; // enable NaN replacement in trace measures
    public static final String NaN_TRACE_SUBSTITUTE_VALUE_PARAM_NAME = "nanTraceValue"; // value substituting NaN in trace measures
    public static final String NaN_LOG_SKIP_FLAG_PARAM_NAME = "nanLogSkip"; // skip NaNs in log-level aggregation
    public static final String LITE_FLAG_PARAM_NAME = "lite"; // use the memory-saving result structure
    public static final String MEASURE_NAME = "measure"; // measure to use for the measurements, default: "all"
    public static final String DEFAULT_MEASURE = "all";
    public static final String DETAILS_LEVEL_PARAM_NAME = "detailsLevel"; // granularity of the computed/returned measures
    public static final DetailLevel DEFAULT_DETAILS_LEVEL = DetailLevel.all;
public enum DetailLevel {
/**
* Compute and return only the events evaluation.
* Note. Regardless if it is returned in output or not,
* the events evaluation is computed anyway because it is the foundation of all the the other measures
*/
event,
/**
* Compute and return only the traces measurements
*/
trace,
/**
* Compute and return only the traces measurements descriptive statistics across the event log
*/
traceStats,
/**
* Compute and return only the log measurements
*/
log,
/**
* Compute and return only the traces measurements and their descriptive statistics across the event log
*/
allTrace,
/**
* Compute and return only the log measurements and the traces measurements descriptive statistics across the event log
*/
allLog,
/**
* Compute and return the measures everything [DEFAULT]
*/
all;
}
    /**
     * Whether a NaN in a measure-trace evaluation should be substituted with {@link #nanTraceSubstituteValue}
     * (instead of being kept as-is).
     */
    public boolean nanTraceSubstituteFlag;
    /**
     * Replacement value used when {@link #nanTraceSubstituteFlag} is set.
     */
    public double nanTraceSubstituteValue;
    /**
     * Whether NaNs should be skipped during the computation of the log-level aggregated measures.
     */
    public boolean nanLogSkipFlag;
    /**
     * Whether to use the MegaMatrixLite (space reduction, only trace-level results)
     * instead of the MegaMatrixMonster (details for single events).
     */
    public boolean liteFlag;
    /**
     * Level of detail of the output: only trace results, only aggregated measures, or both. Default: both.
     **/
    public DetailLevel detailsLevel;
    /**
     * Measure to use for the measurements, default: "all".
     */
    public String measure;
public JanusMeasurementsCmdParameters() {
super();
this.nanTraceSubstituteFlag = false;
this.nanTraceSubstituteValue = 0;
this.nanLogSkipFlag = false;
this.liteFlag = false;
this.detailsLevel = DEFAULT_DETAILS_LEVEL;
this.measure = DEFAULT_MEASURE;
}
public JanusMeasurementsCmdParameters(boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue, boolean nanLogSkipFlag) {
super();
this.nanTraceSubstituteFlag = nanTraceSubstituteFlag;
this.nanTraceSubstituteValue = nanTraceSubstituteValue;
this.nanLogSkipFlag = nanLogSkipFlag;
}
public JanusMeasurementsCmdParameters(boolean nanTraceSubstituteFlag, double nanTraceSubstituteValue, boolean nanLogSkipFlag, boolean liteFlag) {
super();
this.nanTraceSubstituteFlag = nanTraceSubstituteFlag;
this.nanTraceSubstituteValue = nanTraceSubstituteValue;
this.nanLogSkipFlag = nanLogSkipFlag;
this.liteFlag = liteFlag;
}
public JanusMeasurementsCmdParameters(DetailLevel detailsLevel) {
super();
this.detailsLevel = detailsLevel;
}
    /**
     * Builds the parameters by parsing the command line on top of the given
     * options container.
     */
    public JanusMeasurementsCmdParameters(Options options, String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(options, args);
    }

    /**
     * Builds the parameters by parsing the command line with a fresh options container.
     */
    public JanusMeasurementsCmdParameters(String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(new Options(), args);
    }

    // Plain accessors for the NaN-handling and lite-structure settings.

    public boolean isNanTraceSubstituteFlag() {
        return nanTraceSubstituteFlag;
    }

    public void setNanTraceSubstituteFlag(boolean nanTraceSubstituteFlag) {
        this.nanTraceSubstituteFlag = nanTraceSubstituteFlag;
    }

    public double getNanTraceSubstituteValue() {
        return nanTraceSubstituteValue;
    }

    public void setNanTraceSubstituteValue(double nanTraceSubstituteValue) {
        this.nanTraceSubstituteValue = nanTraceSubstituteValue;
    }

    public boolean isNanLogSkipFlag() {
        return nanLogSkipFlag;
    }

    public void setNanLogSkipFlag(boolean nanLogSkipFlag) {
        this.nanLogSkipFlag = nanLogSkipFlag;
    }

    public boolean isLiteFlag() {
        return liteFlag;
    }

    public void setLiteFlag(boolean liteFlag) {
        this.liteFlag = liteFlag;
    }

    /** @return the measure selected when none is given on the command line ("all") */
    public static String getDefaultMeasure() {
        return DEFAULT_MEASURE;
    }
@Override
protected void setup(CommandLine line) {
this.nanTraceSubstituteFlag = line.hasOption(NaN_TRACE_SUBSTITUTE_FLAG_PARAM_NAME);
this.nanTraceSubstituteValue = Double.parseDouble(line.getOptionValue(
NaN_TRACE_SUBSTITUTE_VALUE_PARAM_NAME,
Double.toString(this.nanTraceSubstituteValue)
)
);
this.nanLogSkipFlag = line.hasOption(NaN_LOG_SKIP_FLAG_PARAM_NAME);
this.liteFlag = line.hasOption(LITE_FLAG_PARAM_NAME);
this.detailsLevel = DetailLevel.valueOf(
line.getOptionValue(
DETAILS_LEVEL_PARAM_NAME,
this.detailsLevel.toString()
)
);
this.measure = line.getOptionValue(
MEASURE_NAME,
DEFAULT_MEASURE
);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt : myOptions.getOptions())
options.addOption((Option) myOpt);
return options;
}
    /**
     * @return the options parseable by this parameter class
     * (delegates to {@link #parseableOptions()})
     */
    @Override
    public Options listParseableOptions() {
        return parseableOptions();
    }
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(NaN_TRACE_SUBSTITUTE_FLAG_PARAM_NAME)
.longOpt("nan-trace-substitute")
.desc("Flag to substitute or not the NaN values when computing trace measures")
.build()
);
options.addOption(
Option.builder(NaN_TRACE_SUBSTITUTE_VALUE_PARAM_NAME)
.hasArg().argName("number")
.longOpt("nan-trace-value")
.desc("Value to be substituted to NaN values in trace measures")
.type(String.class)
.build()
);
options.addOption(
Option.builder(NaN_LOG_SKIP_FLAG_PARAM_NAME)
.longOpt("nan-log-skip")
.desc("Flag to skip or not NaN values when computing log measures")
.build()
);
options.addOption(
Option.builder(LITE_FLAG_PARAM_NAME)
.longOpt("lite-flag")
.desc("Flag to use the space saving data structure")
.build()
);
options.addOption(
Option.builder(DETAILS_LEVEL_PARAM_NAME)
.hasArg().argName("name")
.longOpt("details-level")
.desc(("levels of details of the measures to compute. {event(only events evaluation), trace(only trace measures), traceStats(only trace measures log stats), log(only log measures), allTrace(traces measures and their stats), allLog(log measures and traces measures stats), all}. Default: all")
+ printDefault(fromEnumValueToString(DEFAULT_DETAILS_LEVEL)))
.type(String.class)
.build()
);
StringBuilder allMeasures= new StringBuilder();
allMeasures.append("'all'");
for (String m:Measures.MEASURE_NAMES) {
allMeasures.append(",'"+m+"'");
}
options.addOption(
Option.builder(MEASURE_NAME)
.hasArg().argName("name")
.longOpt("measure")
.desc(("measure to compute, either a specific one or all the supported ones. {"+ allMeasures +"}")
+ printDefault(fromEnumValueToString(DEFAULT_MEASURE)))
.type(String.class)
.build()
);
return options;
}
} | 9,200 | 36.555102 | 316 | java |
Janus | Janus-master/src/minerful/reactive/params/JanusPrintParameters.java | package minerful.reactive.params;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
/**
 * Command-line parameters controlling Janus' screen output: whether the
 * results and the various progress/status bars are printed or suppressed.
 */
public class JanusPrintParameters extends ParamsManager {
    public static final String SUPPRESS_SCREEN_PRINT_OUT_RESULTS_PARAM_NAME = "suppressResultsPrintOut";
    public static final String SUPPRESS_SCREEN_PRINT_OUT_DISCOVERY_STATUS_PARAM_NAME = "suppressDiscoveryStatus";
    public static final String SUPPRESS_SCREEN_PRINT_OUT_MEASURES_STATUS_PARAM_NAME = "suppressMeasuresStatus";
    public static final String SUPPRESS_SCREEN_PRINT_OUT_PERMUTATION_STATUS_PARAM_NAME = "suppressPermutationStatus";
    public static final Boolean DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_RESULTS = false;
    public static final Boolean DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_DISCOVERY_STATUS = false;
    public static final Boolean DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_MEASURES_STATUS = false;
    public static final Boolean DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_PERMUTATION_STATUS = false;

    /**
     * Set this field to <code>true</code> to avoid the results to be printed on terminal.
     */
    public Boolean suppressResultsPrintOut;
    /**
     * Set this field to <code>true</code> to avoid the status bar of discovery to be printed.
     */
    public Boolean suppressDiscoveryStatusPrint;
    /**
     * Set this field to <code>true</code> to avoid the status bar of measuring to be printed.
     */
    public Boolean suppressMeasuresStatusPrint;
    /**
     * Set this field to <code>true</code> to avoid the status bar of permutations to be printed.
     */
    public Boolean suppressPermutationStatusPrint;

    /**
     * Initializes every suppression flag to its default (nothing suppressed).
     */
    public JanusPrintParameters() {
        super();
        suppressResultsPrintOut = DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_RESULTS;
        suppressDiscoveryStatusPrint = DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_DISCOVERY_STATUS;
        suppressMeasuresStatusPrint = DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_MEASURES_STATUS;
        suppressPermutationStatusPrint = DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_PERMUTATION_STATUS;
    }

    public JanusPrintParameters(Options options, String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(options, args);
    }

    public JanusPrintParameters(String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(new Options(), args);
    }

    /**
     * Reads the parsed command line: each suppression flag is set to whether
     * the corresponding option is present.
     *
     * @param line the parsed command line
     */
    @Override
    protected void setup(CommandLine line) {
        this.suppressResultsPrintOut = line.hasOption(SUPPRESS_SCREEN_PRINT_OUT_RESULTS_PARAM_NAME);
        this.suppressDiscoveryStatusPrint = line.hasOption(SUPPRESS_SCREEN_PRINT_OUT_DISCOVERY_STATUS_PARAM_NAME);
        this.suppressMeasuresStatusPrint = line.hasOption(SUPPRESS_SCREEN_PRINT_OUT_MEASURES_STATUS_PARAM_NAME);
        this.suppressPermutationStatusPrint = line.hasOption(SUPPRESS_SCREEN_PRINT_OUT_PERMUTATION_STATUS_PARAM_NAME);
    }

    /**
     * Copies this class' parseable options into the given container.
     * Added for consistency with the sibling parameter classes, which all
     * override this method with the same pattern.
     */
    @Override
    public Options addParseableOptions(Options options) {
        Options myOptions = listParseableOptions();
        for (Object myOpt : myOptions.getOptions())
            options.addOption((Option) myOpt);
        return options;
    }

    @Override
    public Options listParseableOptions() {
        return parseableOptions();
    }

    /**
     * Builds the set of screen print-out suppression options parseable by this class.
     *
     * @return the {@link Options} container holding all parseable options
     */
    @SuppressWarnings("static-access")
    public static Options parseableOptions() {
        Options options = new Options();
        options.addOption(
                Option.builder(SUPPRESS_SCREEN_PRINT_OUT_RESULTS_PARAM_NAME)
                        .longOpt("no-screen-print-out-results")
                        .desc("suppresses the print-out of results" +
                                printDefault(DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_RESULTS))
                        .type(Boolean.class)
                        .build()
        );
        options.addOption(
                Option.builder(SUPPRESS_SCREEN_PRINT_OUT_DISCOVERY_STATUS_PARAM_NAME)
                        .longOpt("no-screen-print-out-discovery-status")
                        .desc("suppresses the print-out of discovery status bar" +
                                printDefault(DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_DISCOVERY_STATUS))
                        .type(Boolean.class)
                        .build()
        );
        options.addOption(
                Option.builder(SUPPRESS_SCREEN_PRINT_OUT_MEASURES_STATUS_PARAM_NAME)
                        .longOpt("no-screen-print-out-measures-status")
                        .desc("suppresses the print-out of measurements status bar" +
                                printDefault(DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_MEASURES_STATUS))
                        .type(Boolean.class)
                        .build()
        );
        options.addOption(
                Option.builder(SUPPRESS_SCREEN_PRINT_OUT_PERMUTATION_STATUS_PARAM_NAME)
                        .longOpt("no-screen-print-out-permutation-status")
                        .desc("suppresses the print-out of permutation test status bar" +
                                printDefault(DEFAULT_DO_SUPPRESS_SCREEN_PRINT_OUT_PERMUTATION_STATUS))
                        .type(Boolean.class)
                        .build()
        );
        return options;
    }
}
| 4,981 | 46 | 118 | java |
Janus | Janus-master/src/minerful/reactive/params/JanusVariantCmdParameters.java | package minerful.reactive.params;
import minerful.io.params.InputModelParameters;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import java.io.File;
public class JanusVariantCmdParameters extends ParamsManager {
    // Variant specific
    public static final String INPUT_LOGFILE_1_PATH_PARAM_NAME = "iLF1"; // first log variant to analyse
    public static final String INPUT_LOGFILE_2_PATH_PARAM_NAME = "iLF2"; // second log variant to analyse
    public static final String P_VALUE_NAME = "pValue"; // p-value threshold for statistical relevance of the results. default: 0.01
    public static final Double DEFAULT_P_VALUE = 0.01; // p-value threshold for statistical relevance of the results. default: 0.01
    public static final String MEASURE_NAME = "measure"; // measure to use for the comparison, default: "Confidence"
    public static final String DEFAULT_MEASURE = "Confidence"; // measure to use for the comparison, default: "Confidence"
    public static final String MEASURE_THRESHOLD_NAME = "measureThreshold"; // threshold for the measure to consider it relevant, default: 0.0
    public static final Double DEFAULT_MEASURE_THRESHOLD = 0.0; // threshold for the measure to consider it relevant, default: 0.0
    public static final String DIFFERENCE_THRESHOLD_NAME = "differenceThreshold"; // threshold for the difference of the variants constraints measurement to be considered relevant. default= 0.01
    public static final Double DEFAULT_DIFFERENCE_THRESHOLD = 0.01; // threshold for the difference to consider it relevant, default: 0.01
    public static final String SIMPLIFICATION_FLAG = "simplify"; // flag to simplify the result rules list according to their hierarchy
    public static final String BEST_N_RESULTS_NAME = "bestNresults"; // number of rules in the TOP result list. default= 10
    public static final Integer DEFAULT_BEST_N_RESULTS_VALUE = 10; // number of rules in the TOP result list. default= 10
    public static final String P_VALUE_ADJUSTMENT_METHOD_PARAM_NAME = "pValueAdjustment";
    public static final PValueAdjustmentMethod DEFAULT_P_VALUE_ADJUSTMENT_METHOD = PValueAdjustmentMethod.hb;

    // Log managing from MINERful
    public static final EventClassification DEFAULT_EVENT_CLASSIFICATION = EventClassification.name;
    public static final LogInputEncoding DEFAULT_INPUT_LOG_ENCODING = LogInputEncoding.xes;
    public static final String INPUT_LOG_1_ENCODING_PARAM_NAME = "iLE1"; // encoding of the first input log
    public static final String INPUT_LOG_2_ENCODING_PARAM_NAME = "iLE2"; // encoding of the second input log
    public static final String EVENT_CLASSIFICATION_PARAM_NAME = "iLClassif";
    public static final String N_PERMUTATIONS_PARAM_NAME = "permutations";
    public static final String OUTPUT_FILE_CSV_PARAM_NAME = "oCSV";
    public static final String OUTPUT_FILE_JSON_PARAM_NAME = "oJSON";
    public static final String OUTPUT_KEEP_FLAG_NAME = "oKeep";
    public static final String SAVE_MODEL_1_AS_CSV_PARAM_NAME = "oModel1CSV";
    public static final String SAVE_MODEL_2_AS_CSV_PARAM_NAME = "oModel2CSV";
    public static final String SAVE_MODEL_1_AS_JSON_PARAM_NAME = "oModel1JSON";
    public static final String SAVE_MODEL_2_AS_JSON_PARAM_NAME = "oModel2JSON";
    public static final String ENCODE_OUTPUT_TASKS_FLAG = "encodeTasksFlag";
    public static final String INPUT_MODELFILE_1_PATH_PARAM_NAME = "iMF1";
    public static final String INPUT_MODEL_ENCODING_1_PARAM_NAME = "iME1";
    public static final String INPUT_MODELFILE_2_PATH_PARAM_NAME = "iMF2";
    public static final String INPUT_MODEL_ENCODING_2_PARAM_NAME = "iME2";
    public static final InputModelParameters.InputEncoding DEFAULT_INPUT_MODEL_ENCODING = InputModelParameters.InputEncoding.JSON;

    /**
     * Supported encodings for the input event logs.
     */
    public enum LogInputEncoding {
        /**
         * For XES logs (also compressed)
         */
        xes,
        /**
         * For MXML logs (also compressed)
         */
        mxml,
        /**
         * For string-encoded traces, where each character is assumed to be a task symbol
         */
        strings;
    }

    /**
     * Policies to map events to event classes (task names).
     */
    public enum EventClassification {
        name, logspec
    }

    /**
     * Corrections for the multiple-testing problem applied to the p-values.
     */
    public enum PValueAdjustmentMethod {
        /**
         * Do not apply correction
         */
        none,
        /**
         * Holm-Bonferroni
         */
        hb,
        /**
         * Benjamini-Hochberg
         */
        bh
    }

    /**
     * file of the first log variant to analyse
     */
    public File inputLogFile1;
    /**
     * file of the second log variant to analyse
     */
    public File inputLogFile2;
    /**
     * Encoding language for the first input event log
     */
    public LogInputEncoding inputLogLanguage1;
    /**
     * Encoding language for the second input event log
     */
    public LogInputEncoding inputLogLanguage2;
    /**
     * file of the process model for the first log variant to analyse
     */
    public File inputModelFile1;
    /**
     * file of the process model for the second log variant to analyse
     */
    public File inputModelFile2;
    /**
     * Encoding language for the first input model
     */
    public InputModelParameters.InputEncoding inputModelLanguage1;
    /**
     * Encoding language for the second input model
     */
    public InputModelParameters.InputEncoding inputModelLanguage2;
    /**
     * Classification policy to relate events to event classes, that is the task names
     */
    public EventClassification eventClassification;
    /**
     * p-value threshold for statistical relevance of the results. default: 0.01
     */
    public double pValue;
    /**
     * measure to use for the comparison, default: "Confidence"
     */
    public String measure;
    /**
     * threshold for the measure to consider it relevant, default: 0.0
     */
    public double measureThreshold;
    /**
     * threshold for the difference of the variants constraints measurement to be considered relevant, default: 0.01
     */
    public double differenceThreshold;
    /**
     * number of permutations to perform, default: 1000
     */
    public int nPermutations;
    /**
     * output file in CSV format
     */
    public File outputCvsFile;
    /**
     * output file in JSON format
     */
    public File outputJsonFile;
    /**
     * keep the irrelevant results in output
     */
    public boolean oKeep;
    /**
     * File in which discovered constraints for variant 1 are printed in CSV format. Keep it equal to <code>null</code> for avoiding such print-out.
     */
    public File fileToSaveModel1AsCSV;
    /**
     * File in which discovered constraints for variant 2 are printed in CSV format. Keep it equal to <code>null</code> for avoiding such print-out.
     */
    public File fileToSaveModel2AsCSV;
    /**
     * File in which the discovered process model for variant 1 is saved as a JSON file. Keep it equal to <code>null</code> for avoiding such print-out.
     */
    public File fileToSaveModel1AsJSON;
    /**
     * File in which the discovered process model for variant 2 is saved as a JSON file. Keep it equal to <code>null</code> for avoiding such print-out.
     */
    public File fileToSaveModel2AsJSON;
    /**
     * Flag if the output tasks/events should be encoded (e.g., A B C D E...) or not (original names as in log)
     **/
    public boolean encodeOutputTasks;
    /**
     * Flag if the rules set returned from the permutation test should be simplified according to the rules hierarchy. Default=false
     **/
    public boolean simplify;
    /**
     * Number of rules for the TOP results. Default=10
     **/
    public int bestNresults;
    /**
     * method to adjust the pValues to address the Multiple test problem. Default=hb
     **/
    public PValueAdjustmentMethod pValueAdjustmentMethod;
public JanusVariantCmdParameters() {
super();
this.inputLogLanguage1 = DEFAULT_INPUT_LOG_ENCODING;
this.inputLogLanguage2 = DEFAULT_INPUT_LOG_ENCODING;
this.inputModelLanguage1 = DEFAULT_INPUT_MODEL_ENCODING;
this.inputModelLanguage2 = DEFAULT_INPUT_MODEL_ENCODING;
this.eventClassification = DEFAULT_EVENT_CLASSIFICATION;
this.inputLogFile1 = null;
this.inputLogFile2 = null;
this.inputModelFile1 = null;
this.inputModelFile2 = null;
this.pValue = DEFAULT_P_VALUE;
this.measure = DEFAULT_MEASURE;
this.measureThreshold = DEFAULT_MEASURE_THRESHOLD;
this.nPermutations = 1000;
this.outputCvsFile = null;
this.outputJsonFile = null;
this.oKeep = false;
this.fileToSaveModel1AsCSV = null;
this.fileToSaveModel2AsCSV = null;
this.fileToSaveModel1AsJSON = null;
this.fileToSaveModel2AsJSON = null;
this.encodeOutputTasks = false;
this.simplify = false;
this.differenceThreshold = DEFAULT_DIFFERENCE_THRESHOLD;
this.bestNresults = DEFAULT_BEST_N_RESULTS_VALUE;
this.pValueAdjustmentMethod = DEFAULT_P_VALUE_ADJUSTMENT_METHOD;
}
    /**
     * Builds the parameters by parsing the command line on top of the given
     * options container.
     */
    public JanusVariantCmdParameters(Options options, String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(options, args);
    }

    /**
     * Builds the parameters by parsing the command line with a fresh options container.
     */
    public JanusVariantCmdParameters(String[] args) {
        this();
        // parse the command line arguments
        this.parseAndSetup(new Options(), args);
    }
@Override
protected void setup(CommandLine line) {
this.inputLogFile1 = openInputFile(line, INPUT_LOGFILE_1_PATH_PARAM_NAME);
this.inputLogFile2 = openInputFile(line, INPUT_LOGFILE_2_PATH_PARAM_NAME);
this.inputLogLanguage1 = LogInputEncoding.valueOf(
line.getOptionValue(
INPUT_LOG_1_ENCODING_PARAM_NAME,
this.inputLogLanguage1.toString()
)
);
this.inputLogLanguage2 = LogInputEncoding.valueOf(
line.getOptionValue(
INPUT_LOG_2_ENCODING_PARAM_NAME,
this.inputLogLanguage2.toString()
)
);
this.eventClassification = EventClassification.valueOf(
line.getOptionValue(
EVENT_CLASSIFICATION_PARAM_NAME,
this.eventClassification.toString()
)
);
this.inputModelFile1 = openInputFile(line, INPUT_MODELFILE_1_PATH_PARAM_NAME);
this.inputModelFile2 = openInputFile(line, INPUT_MODELFILE_2_PATH_PARAM_NAME);
this.inputModelLanguage1 = InputModelParameters.InputEncoding.valueOf(
line.getOptionValue(
INPUT_MODEL_ENCODING_1_PARAM_NAME,
this.inputModelLanguage1.toString()
)
);
this.inputModelLanguage2 = InputModelParameters.InputEncoding.valueOf(
line.getOptionValue(
INPUT_MODEL_ENCODING_2_PARAM_NAME,
this.inputModelLanguage2.toString()
)
);
this.pValue = Double.parseDouble(
line.getOptionValue(
P_VALUE_NAME,
Double.toString(this.pValue)
)
);
this.measure = line.getOptionValue(
MEASURE_NAME,
DEFAULT_MEASURE
);
this.measureThreshold = Double.parseDouble(
line.getOptionValue(
MEASURE_THRESHOLD_NAME,
Double.toString(this.measureThreshold)
)
);
this.differenceThreshold = Double.parseDouble(
line.getOptionValue(
DIFFERENCE_THRESHOLD_NAME,
Double.toString(this.differenceThreshold)
)
);
this.nPermutations = Integer.parseInt(
line.getOptionValue(
N_PERMUTATIONS_PARAM_NAME,
Integer.toString(this.nPermutations)
)
);
this.outputCvsFile = openOutputFile(line, OUTPUT_FILE_CSV_PARAM_NAME);
this.outputJsonFile = openOutputFile(line, OUTPUT_FILE_JSON_PARAM_NAME);
this.oKeep = line.hasOption(OUTPUT_KEEP_FLAG_NAME);
this.inputLogFile1 = openInputFile(line, INPUT_LOGFILE_1_PATH_PARAM_NAME);
this.inputLogFile2 = openInputFile(line, INPUT_LOGFILE_2_PATH_PARAM_NAME);
this.fileToSaveModel1AsCSV = openOutputFile(line, SAVE_MODEL_1_AS_CSV_PARAM_NAME);
this.fileToSaveModel2AsCSV = openOutputFile(line, SAVE_MODEL_2_AS_CSV_PARAM_NAME);
this.fileToSaveModel1AsJSON = openOutputFile(line, SAVE_MODEL_1_AS_JSON_PARAM_NAME);
this.fileToSaveModel2AsJSON = openOutputFile(line, SAVE_MODEL_2_AS_JSON_PARAM_NAME);
this.encodeOutputTasks = line.hasOption(OUTPUT_KEEP_FLAG_NAME);
this.simplify = line.hasOption(SIMPLIFICATION_FLAG);
this.bestNresults = Integer.parseInt(
line.getOptionValue(
BEST_N_RESULTS_NAME,
Integer.toString(this.bestNresults)
)
);
this.pValueAdjustmentMethod = PValueAdjustmentMethod.valueOf(
line.getOptionValue(
P_VALUE_ADJUSTMENT_METHOD_PARAM_NAME,
this.pValueAdjustmentMethod.toString()
)
);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt : myOptions.getOptions())
options.addOption((Option) myOpt);
return options;
}
    /**
     * @return the options parseable by this parameter class
     * (delegates to {@link #parseableOptions()})
     */
    @Override
    public Options listParseableOptions() {
        return parseableOptions();
    }
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(INPUT_LOGFILE_1_PATH_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt("in-log-1-file")
.desc("path to read the log file from")
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_LOGFILE_2_PATH_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt("in-log-2-file")
.desc("path to read the log file from")
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_LOG_1_ENCODING_PARAM_NAME)
.hasArg().argName("language")
.longOpt("in-log-1-encoding")
.desc("input encoding language " + printValues(LogInputEncoding.values())
+ printDefault(fromEnumValueToString(DEFAULT_INPUT_LOG_ENCODING)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_LOG_2_ENCODING_PARAM_NAME)
.hasArg().argName("language")
.longOpt("in-log-2-encoding")
.desc("input encoding language " + printValues(LogInputEncoding.values())
+ printDefault(fromEnumValueToString(DEFAULT_INPUT_LOG_ENCODING)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(EVENT_CLASSIFICATION_PARAM_NAME)
.hasArg().argName("class")
.longOpt("in-log-evt-classifier")
.desc("event classification (resp., by activity name, or according to the log-specified pattern) " + printValues(EventClassification.values())
+ printDefault(fromEnumValueToString(DEFAULT_EVENT_CLASSIFICATION)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_MODELFILE_1_PATH_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt("in-model-1-file")
.desc("path to read the input model file from")
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_MODELFILE_2_PATH_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt("in-model-2-file")
.desc("path to read the input model file from")
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_MODEL_ENCODING_1_PARAM_NAME)
.hasArg().argName("language")
.longOpt("in-model-1-encoding")
.desc("input encoding language " + printValues(InputModelParameters.InputEncoding.values())
+ printDefault(fromEnumValueToString(DEFAULT_INPUT_MODEL_ENCODING)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(INPUT_MODEL_ENCODING_2_PARAM_NAME)
.hasArg().argName("language")
.longOpt("in-model-2-encoding")
.desc("input encoding language " + printValues(InputModelParameters.InputEncoding.values())
+ printDefault(fromEnumValueToString(DEFAULT_INPUT_MODEL_ENCODING)))
.type(String.class)
.build()
);
options.addOption(
Option.builder(MEASURE_NAME)
.hasArg().argName("name")
.longOpt("measure")
.desc("measure to use for the comparison of the variants. default: Confidence")
.type(String.class)
.build()
);
options.addOption(
Option.builder(MEASURE_THRESHOLD_NAME)
.hasArg().argName("number")
.longOpt("measure-threshold")
.desc("threshold to consider the measure relevant. default: 0.0")
.type(Double.class)
.build()
);
options.addOption(
Option.builder(DIFFERENCE_THRESHOLD_NAME)
.hasArg().argName("number")
.longOpt("difference-threshold")
.desc("threshold for the difference of the variants constraints measurement to be considered relevant, default: 0.01")
.type(Double.class)
.build()
);
options.addOption(
Option.builder(P_VALUE_NAME)
.hasArg().argName("number")
.longOpt("p-value")
.desc("p-value threshold for statistical relevance of the results. default: 0.01")
.type(Double.class)
.build()
);
options.addOption(
Option.builder(N_PERMUTATIONS_PARAM_NAME)
.hasArg().argName("number")
.longOpt("number-of-permutations")
.desc("number of permutations to perform during the statistical test. If <=0 the number is auto-set for the Multiple Testing correction. default: 1000")
.type(Double.class)
.build()
);
options.addOption(
Option.builder(OUTPUT_FILE_CSV_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt("out-csv-file")
.desc("path to output CSV file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(OUTPUT_FILE_JSON_PARAM_NAME)
.hasArg().argName("path")
// .isRequired(true) // Causing more problems than not
.longOpt("out-json-file")
.desc("path to output JSON file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(OUTPUT_KEEP_FLAG_NAME)
// .isRequired(true) // Causing more problems than not
.longOpt("output-keep")
.desc("keep irrelevant results in output")
.type(Boolean.class)
.build()
);
options.addOption(
Option.builder(SAVE_MODEL_1_AS_CSV_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-model-1-as-csv")
.desc("print discovered model 1 in CSV format into the specified file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_MODEL_2_AS_CSV_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-model-2-as-csv")
.desc("print discovered model 2 in CSV format into the specified file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_MODEL_1_AS_JSON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-model-1-as-json")
.desc("print discovered model 1 in JSON format into the specified file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(SAVE_MODEL_2_AS_JSON_PARAM_NAME)
.hasArg().argName("path")
.longOpt("save-model-2-as-json")
.desc("print discovered model 2 in JSON format into the specified file")
.type(String.class)
.build()
);
options.addOption(
Option.builder(ENCODE_OUTPUT_TASKS_FLAG)
// .isRequired(true) // Causing more problems than not
.longOpt("flag-encoding-tasks")
.desc("Flag if the output tasks/events should be encoded")
.type(Boolean.class)
.build()
);
options.addOption(
Option.builder(SIMPLIFICATION_FLAG)
// .isRequired(true) // Causing more problems than not
.longOpt("simplification-flag")
.desc("Flag if the output rules set shoul dbe simplified according to rules hierarchy. Default: false")
.type(Boolean.class)
.build()
);
options.addOption(
Option.builder(BEST_N_RESULTS_NAME)
.hasArg().argName("number")
.longOpt("number-of-best-results")
.desc("Number of rules to return in among the best results. Default: 10")
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(P_VALUE_ADJUSTMENT_METHOD_PARAM_NAME)
.hasArg().argName("language")
.longOpt("p-value-adjustment-method")
.desc("pValue adjustment methods: Holm-Bonferroni, Benjamini-Hochberg " + printValues(PValueAdjustmentMethod.values())
+ printDefault(fromEnumValueToString(DEFAULT_P_VALUE_ADJUSTMENT_METHOD)))
.type(String.class)
.build()
);
return options;
}
}
| 24,707 | 43.200358 | 195 | java |
Janus | Janus-master/src/minerful/reactive/variant/DFGEncodedEvent.java | package minerful.reactive.variant;
import minerful.concept.AbstractTaskClass;
/**
 * Event object containing only the task class and the timestamp in seconds
 */
public class DFGEncodedEvent {
    public AbstractTaskClass eventsSequence; // task class of the event
    public long timesSequence; // event timestamp, expressed in seconds

    /**
     * @param eventsSequence task class of the event
     * @param timesSequence  event timestamp in seconds
     */
    public DFGEncodedEvent(AbstractTaskClass eventsSequence, long timesSequence) {
        this.eventsSequence = eventsSequence;
        this.timesSequence = timesSequence;
    }
}
| 526 | 24.095238 | 82 | java |
Janus | Janus-master/src/minerful/reactive/variant/DFGEncodedLog.java | package minerful.reactive.variant;
import minerful.logparser.LogTraceParser;
import minerful.logparser.XesEventParser;
import minerful.logparser.XesLogParser;
import minerful.logparser.XesTraceParser;
import org.deckfour.xes.model.XAttributeTimestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class DFGEncodedLog {
    /** Traces of the log; each trace is the ordered list of its encoded events. */
    public List<List<DFGEncodedEvent>> traces;
    XesLogParser eventLogParser;

    /**
     * Encode the given XES event log
     *
     * @param eventLogParser parser of the XES log to encode
     */
    public DFGEncodedLog(XesLogParser eventLogParser) {
        this.eventLogParser = eventLogParser;
        traces = new ArrayList<>(eventLogParser.length());
        for (Iterator<LogTraceParser> logIterator = eventLogParser.traceIterator(); logIterator.hasNext(); ) {
            XesTraceParser traceParser = (XesTraceParser) logIterator.next();
            traceParser.init(); // otherwise, if the trace was already read, the iterator is pointing to the end of the trace
            List<DFGEncodedEvent> currentTrace = new ArrayList<>(traceParser.length());
            while (!traceParser.isParsingOver()) {
                XesEventParser current = (XesEventParser) traceParser.parseSubsequent();
                // Keep only the seconds, not the milliseconds
                currentTrace.add(new DFGEncodedEvent(
                        current.getEvent().getTaskClass(),
                        TimeUnit.SECONDS.convert(((XAttributeTimestamp) current.xesEvent.getAttributes().get("time:timestamp")).getValue().getTime(), TimeUnit.MILLISECONDS)
                ));
            }
            traces.add(currentTrace);
        }
    }

    /**
     * Wraps an already-encoded list of traces; used by {@link #merge(DFGEncodedLog)}
     * to avoid re-parsing the underlying XES log.
     */
    private DFGEncodedLog(XesLogParser eventLogParser, List<List<DFGEncodedEvent>> traces) {
        this.eventLogParser = eventLogParser;
        this.traces = traces;
    }

    /**
     * Shuffles randomly the order of the traces
     */
    public void shuffleTraces() {
        Collections.shuffle(traces);
    }

    /**
     * returns an encoded log merging this and the input encoded logs.
     * FIX: the previous version re-parsed the entire XES log (via the public
     * constructor) just to duplicate this log's traces; the in-memory traces
     * are now reused directly.
     *
     * @param otherLog log whose traces are appended after this log's traces
     * @return a new encoded log containing the traces of both logs
     */
    public DFGEncodedLog merge(DFGEncodedLog otherLog) {
        List<List<DFGEncodedEvent>> mergedTraces = new ArrayList<>(traces.size() + otherLog.traces.size());
        mergedTraces.addAll(traces);
        mergedTraces.addAll(otherLog.traces);
        return new DFGEncodedLog(eventLogParser, mergedTraces);
    }

    /**
     * returns the number of traces in this encoded log
     *
     * @return number of traces
     */
    public int length() {
        return traces.size();
    }
}
| 2,380 | 32.069444 | 172 | java |
Janus | Janus-master/src/minerful/reactive/variant/DFGPermutationResult.java | package minerful.reactive.variant;
/**
 * Outcome of the permutation test for one DFG transition: the transition's
 * endpoints, the statistic flavour (AVG/MIN/MAX), the p-value, and the
 * per-log values with their difference.
 */
public class DFGPermutationResult {
    // transition under test
    public String sourceNode; // source node of the transition
    public String destinationNode; // destination node of the transition
    public String kind; // AVG/MIN/MAX
    // test statistics
    public double pValue;
    public double diff;
    public double log1Value;
    public double log2Value;

    /**
     * @param sourceNode source node of the transition
     * @param destinationNode destination node of the transition
     * @param kind statistic flavour (AVG/MIN/MAX)
     * @param pValue p-value of the permutation test
     * @param diff difference between the two logs' values
     * @param log1Value value measured on the first log
     * @param log2Value value measured on the second log
     */
    public DFGPermutationResult(String sourceNode, String destinationNode, String kind, double pValue, double diff, double log1Value, double log2Value) {
        this.sourceNode = sourceNode;
        this.destinationNode = destinationNode;
        this.kind = kind;
        this.pValue = pValue;
        this.diff = diff;
        this.log1Value = log1Value;
        this.log2Value = log2Value;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append(sourceNode).append("-->").append(destinationNode)
                .append(' ').append(kind).append(' ')
                .append(", pValue=").append(pValue)
                .append(", diff=").append(diff)
                .append(", log1Value=").append(log1Value)
                .append(", log2Value=").append(log2Value);
        return text.toString();
    }
}
| 1,239 | 29.243902 | 153 | java |
Janus | Janus-master/src/minerful/reactive/variant/DFGtimesVariantAnalysisCore.java | package minerful.reactive.variant;
import minerful.concept.TaskChar;
import minerful.concept.TaskClass;
import minerful.logparser.*;
import minerful.reactive.dfg.DFG;
import minerful.reactive.dfg.DFGTransition;
import minerful.reactive.params.JanusDFGVariantCmdParameters;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.log4j.Logger;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
/**
* Class to organize the variant analysis based on DFG times
*/
public class DFGtimesVariantAnalysisCore {
protected static Logger logger;
{
if (logger == null) {
logger = Logger.getLogger(DFGtimesVariantAnalysisCore.class.getCanonicalName());
}
}
    private final XesLogParser logParser1; // original log1 parser
    private final XesLogParser logParser2; // original log2 parser
    private final JanusDFGVariantCmdParameters janusVariantParams; // input parameter of the analysis
    private final JanusPrintParameters janusViewParams; // input parameter of the analysis
    // Bidirectional mapping between task classes and the integer indices used
    // for the tasks-number x tasks-number matrices (populated in checkWithGraph).
    private Map<Integer, TaskClass> indexToTaskMap;
    private Map<TaskClass, Integer> TaskToIndexMap; // NOTE(review): name breaks lowerCamelCase convention

    /**
     * @param logParser_1 parser of the first log variant
     * @param logParser_2 parser of the second log variant
     * @param janusParams input parameters of the variant analysis
     * @param janusViewParams parameters controlling screen print-out
     */
    public DFGtimesVariantAnalysisCore(XesLogParser logParser_1, XesLogParser logParser_2, JanusDFGVariantCmdParameters janusParams, JanusPrintParameters janusViewParams) {
        this.logParser1 = logParser_1;
        this.logParser2 = logParser_2;
        this.janusVariantParams = janusParams;
        this.janusViewParams = janusViewParams;
    }
/**
* Launcher for the variants check using graph structure for DFGs
*/
public List<DFGPermutationResult> checkWithGraph() {
logger.info("DFG Variant Analysis start");
Instant start = Instant.now();
logger.info("Pre-processing processing...");
// Encode tasks: using maps operations is too heavy during the permutation, better a matrix with known indices
int tasksNumber = logParser1.getTaskCharArchive().size();
indexToTaskMap = new HashMap<Integer, TaskClass>();
TaskToIndexMap = new HashMap<TaskClass, Integer>();
int taskIndex = 0;
for (TaskChar t : logParser1.getTaskCharArchive().getTaskChars()) {
TaskClass tc = t.taskClass;
indexToTaskMap.put(taskIndex, tc);
TaskToIndexMap.put(tc, taskIndex);
taskIndex++;
}
logger.info("Pre-processing time: " + Duration.between(start, Instant.now()));
logger.info("Permutation test with Graphs and XES parser...");
start = Instant.now();
// structure to store the initial difference
// matrix NxN where N is the number of tasks
DFGEncodedLog eLog1 = new DFGEncodedLog(logParser1);
DFGEncodedLog eLog2 = new DFGEncodedLog(logParser2);
DFG dfg1 = DFG.buildDFGFromEncodedLog(eLog1);
DFG dfg2 = DFG.buildDFGFromEncodedLog(eLog2);
// DFG dfg1 = DFG.buildDFGFromXesLogParser(logParser1);
// DFG dfg2 = DFG.buildDFGFromXesLogParser(logParser2);
float[][][] initialDifferences = compareDFGsGraphs(dfg1, dfg2, tasksNumber);
logger.info("expected safe number of permutations for Multiple Test adjustment: " + (int) (notZeroDiff(initialDifferences) / janusVariantParams.pValue));
// initialize structure to store the intermediate results for all transitions
// matrix NxNx3 where N is the number of tasks and and 3 is the <AVG,MIN,MAX> of the transition
// the structure counts how many time the difference was greater than the one observed initially
int[][][] relevantCounter = new int[tasksNumber][tasksNumber][3];
// Permutation test & significance test
// permutationTestGraphBased(initialDifferences, relevantCounter, tasksNumber, logParser1, logParser2);
permutationTestGraphBasedEncoded(initialDifferences, relevantCounter, tasksNumber, eLog1, eLog2);
List<DFGPermutationResult> result = significanceTestGraph(tasksNumber, initialDifferences, relevantCounter, dfg1, dfg2);
// POST-PROCESSING
logger.info("Required permutations for multiple Test adjustment: " + (int) (result.size() / janusVariantParams.pValue) + " [used:" + janusVariantParams.nPermutations + "]");
if (janusVariantParams.pValueAdjustmentMethod != JanusDFGVariantCmdParameters.PValueAdjustmentMethod.none)
result = pValueAdjustment(result);
logger.info("Permutation test time: " + Duration.between(start, Instant.now()));
return result;
}
/**
* Adjust the pValues of the results of the permutation test to address the Multiple Testing problem.
* Returns the filtered list of results.
*
* @param results
* @return
*/
private List<DFGPermutationResult> pValueAdjustment(List<DFGPermutationResult> results) {
int m = results.size();
if (m / janusVariantParams.pValue > janusVariantParams.nPermutations) {
// the smallest adjusted pValue is pValueThreshold/m, thus the number of permutations must allow to reach such dimensions
logger.warn("Not enough iterations for a safe Multiple Testing adjustment!");
}
logger.info("pValue correction using " + janusVariantParams.pValueAdjustmentMethod + " method...");
// Sort results by pValue
results.sort(Comparator.comparingDouble(o -> o.pValue)); //increasing order
// Compute rank
TreeMap<Double, Integer> rankMap = new TreeMap();
int currentRank = 0;
for (DFGPermutationResult currentResult : results) {
currentRank++;
if (!rankMap.containsKey(currentResult.pValue)) {
rankMap.put(currentResult.pValue, currentRank);
}
}
int removed = 0;
boolean killSwitch = false;
List<DFGPermutationResult> newResults = new LinkedList<>();
switch (janusVariantParams.pValueAdjustmentMethod) {
case hb:
// Holm-Bonferroni
for (DFGPermutationResult currentResult : results) {
if (killSwitch || currentResult.pValue >= janusVariantParams.pValue / (m + 1 - rankMap.get(currentResult.pValue))) {
killSwitch = true;
removed++;
} else {
newResults.add(currentResult);
}
}
break;
case bh:
// Benjamini–Hochberg
for (DFGPermutationResult currentResult : results) {
if (killSwitch || currentResult.pValue >= rankMap.get(currentResult.pValue) / (float) m * janusVariantParams.pValue) {
killSwitch = true;
removed++;
} else {
newResults.add(currentResult);
}
}
break;
default:
throw new IllegalArgumentException("Unknown method code! use HB (Holm–Bonferroni) or BH (Benjamini–Hochberg)");
}
logger.info("Removed " + removed + " of " + m + " results");
return newResults;
}
/**
* Returns the number of non-zero differences for the given differences matrix
*
* @param differenceMatrix
* @return
*/
private int notZeroDiff(float[][][] differenceMatrix) {
int counter = 0;
for (float[][] row : differenceMatrix) {
for (float[] col : row) {
for (float value : col) {
if (value > 0) counter++;
}
}
}
return counter;
}
/**
* Perform the permutation test on the given input graph DFGs
*
* @param initialDifferences
* @param relevantCounter
* @param tasksNumber
* @param logParser1
* @param logParser2
*/
@Deprecated
private void permutationTestGraphBased(float[][][] initialDifferences, int[][][] relevantCounter, int tasksNumber, XesLogParser logParser1, XesLogParser logParser2) {
int log1len = logParser1.length();
int log2len = logParser2.length();
int totLen = log1len + log2len;
// Merge log for shuffling
XesLogParser mergedLogParser = XesLogParser.mergeParsersWithEquivalentTaskChars(logParser1, logParser2);
XesLogParser newLog1Parser;
XesLogParser newLog2Parser;
DFG newDFG1;
DFG newDFG2;
int step = 25;
for (int i = 0; i < janusVariantParams.nPermutations; i++) {
if (!janusViewParams.suppressPermutationStatusPrint && i % step == 0)
System.out.print("\rPermutation: " + i + "/" + janusVariantParams.nPermutations); // Status counter "current trace/total trace"
// Shuffle trace
mergedLogParser.shuffleTraces();
// generate the two new logs
newLog1Parser = (XesLogParser) mergedLogParser.takeASlice(0, log1len);
newLog2Parser = (XesLogParser) mergedLogParser.takeASlice(log1len, totLen);
// compute the differences
newDFG1 = DFG.buildDFGFromXesLogParser(newLog1Parser);
newDFG2 = DFG.buildDFGFromXesLogParser(newLog2Parser);
float[][][] currentDifferences = compareDFGsGraphs(newDFG1, newDFG2, tasksNumber);
checkDifferencesAgainstReferenceGraph(currentDifferences, initialDifferences, relevantCounter);
}
if (!janusViewParams.suppressPermutationStatusPrint)
System.out.println("\rPermutation: " + janusVariantParams.nPermutations + "/" + janusVariantParams.nPermutations);
}
/**
* Perform the permutation test on the given input graph DFGs using an encoded Event Log (around x4 faster than using normal XESParser).
*
* @param initialDifferences
* @param relevantCounter
* @param tasksNumber
* @param logParser1
* @param logParser2
*/
private void permutationTestGraphBasedEncoded(float[][][] initialDifferences, int[][][] relevantCounter, int tasksNumber, DFGEncodedLog logParser1, DFGEncodedLog logParser2) {
int log1len = logParser1.length();
int log2len = logParser2.length();
int totLen = log1len + log2len;
// Merge log for shuffling
DFGEncodedLog mergedLogParser = logParser1.merge(logParser2);
DFG newDFG1;
DFG newDFG2;
int step = 25;
for (int i = 0; i < janusVariantParams.nPermutations; i++) {
if (!janusViewParams.suppressPermutationStatusPrint && i % step == 0)
System.out.print("\rPermutation: " + i + "/" + janusVariantParams.nPermutations); // Status counter "current trace/total trace"
// Shuffle trace
mergedLogParser.shuffleTraces();
// generate the two new logs & compute the differences
newDFG1 = DFG.buildDFGFromEncodedLog(mergedLogParser.traces.subList(0, log1len));
newDFG2 = DFG.buildDFGFromEncodedLog(mergedLogParser.traces.subList(log1len, totLen));
float[][][] currentDifferences = compareDFGsGraphs(newDFG1, newDFG2, tasksNumber);
checkDifferencesAgainstReferenceGraph(currentDifferences, initialDifferences, relevantCounter);
}
if (!janusViewParams.suppressPermutationStatusPrint)
System.out.println("\rPermutation: " + janusVariantParams.nPermutations + "/" + janusVariantParams.nPermutations);
}
/**
* Check the graph-based permutations results and return the significant differences in output
*
* @param tasksNumber
* @param initialDifferences
* @param relevantCounter
* @param dfg1
* @param dfg2
* @return
*/
public List<DFGPermutationResult> significanceTestGraph(int tasksNumber, float[][][] initialDifferences, int[][][] relevantCounter, DFG dfg1, DFG dfg2) {
// Significance test
int nanResultsCounter = 0;
List<DFGPermutationResult> result = new LinkedList<>();
for (int i = 0; i < tasksNumber; i++) {
for (int j = 0; j < tasksNumber; j++) {
boolean flag = false;
for (int k = 0; k < 3; k++) {
float currentPValue = (float) relevantCounter[i][j][k] / janusVariantParams.nPermutations;
if (currentPValue <= janusVariantParams.pValue) {
TaskClass tcI = indexToTaskMap.get(i);
TaskClass tcJ = indexToTaskMap.get(j);
DFGTransition time1 = dfg1.getTransition(tcI, tcJ);
DFGTransition time2 = dfg2.getTransition(tcI, tcJ);
if (time1 == null || time2 == null) {
flag = true;
continue;
}
switch (k) {
case 0:
result.add(new DFGPermutationResult(
indexToTaskMap.get(i).toString(),
indexToTaskMap.get(j).toString(),
"AVG",
currentPValue,
initialDifferences[i][j][k],
time1.getTimeAvg(),
time2.getTimeAvg()
));
break;
case 1:
result.add(new DFGPermutationResult(
indexToTaskMap.get(i).toString(),
indexToTaskMap.get(j).toString(),
"MIN",
currentPValue,
initialDifferences[i][j][k],
time1.getTimeMin(),
time2.getTimeMin()
));
break;
case 2:
result.add(new DFGPermutationResult(
indexToTaskMap.get(i).toString(),
indexToTaskMap.get(j).toString(),
"MAX",
currentPValue,
initialDifferences[i][j][k],
time1.getTimeMax(),
time2.getTimeMax()
));
break;
}
}
}
if (flag) {
nanResultsCounter++;
}
}
}
logger.info("NaN Relevant differences: " + nanResultsCounter);
return result;
}
/**
* Check the difference of the given graph-based DFG difference and update the counters
*
* @param currentDifferences
* @param initialDifferences
* @param relevantCounter
*/
private void checkDifferencesAgainstReferenceGraph(float[][][] currentDifferences, float[][][] initialDifferences, int[][][] relevantCounter) {
int tNum = currentDifferences.length;
for (int i = 0; i < tNum; i++) {
for (int j = 0; j < tNum; j++) {
for (int k = 0; k < 3; k++) {
if (Float.isNaN(currentDifferences[i][j][k]) && !Float.isNaN(initialDifferences[i][j][k]))
relevantCounter[i][j][k]++;
else if (!Float.isNaN(currentDifferences[i][j][k]) && Float.isNaN(initialDifferences[i][j][k]))
relevantCounter[i][j][k]++;
else if (currentDifferences[i][j][k] >= initialDifferences[i][j][k])
relevantCounter[i][j][k]++;
}
}
}
}
/**
* Compare two DFGs and return the dime differences between their transition considering <AVG,MIN,MAX>
* The union of all the task is considered.
* The result is a NxNx3 matrix where N is the number of tasks and 3 are the aforementioned <AVG,MIN,MAX> of the transition
*
* @param dfg1
* @param dfg2
* @return
*/
private float[][][] compareDFGsGraphs(DFG dfg1, DFG dfg2, int tasksTotalNumber) {
float[][][] result = new float[tasksTotalNumber][tasksTotalNumber][3];
for (int i = 0; i < tasksTotalNumber; i++) {
TaskClass tcI = indexToTaskMap.get(i);
for (int j = 0; j < tasksTotalNumber; j++) {
TaskClass tcJ = indexToTaskMap.get(j);
DFGTransition time1 = dfg1.getTransition(tcI, tcJ);
DFGTransition time2 = dfg2.getTransition(tcI, tcJ);
if (time1 != null & time2 != null) {
result[i][j][0] = Math.abs(time1.getTimeAvg() - time2.getTimeAvg());
result[i][j][1] = Math.abs(time1.getTimeMin() - time2.getTimeMin());
result[i][j][2] = Math.abs(time1.getTimeMax() - time2.getTimeMax());
} else if (time1 == null & time2 == null) {
result[i][j][0] = 0.0F;
result[i][j][1] = 0.0F;
result[i][j][2] = 0.0F;
// } else if (time1 == null & time2 != null) {
// } else if (time1 != null & time2 == null) {
} else {
result[i][j][0] = Float.NaN;
result[i][j][1] = Float.NaN;
result[i][j][2] = Float.NaN;
}
}
}
return result;
}
} | 17,886 | 43.384615 | 181 | java |
Janus | Janus-master/src/minerful/reactive/variant/ReactiveVariantAnalysisCore.java | package minerful.reactive.variant;
import com.google.common.collect.Ordering;
import com.google.common.collect.TreeMultimap;
import minerful.concept.ProcessModel;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.reactive.measurements.MegaMatrixMonster;
import minerful.reactive.measurements.ReactiveMeasurementsOfflineQueryingCore;
import minerful.reactive.params.JanusMeasurementsCmdParameters;
import minerful.reactive.params.JanusVariantCmdParameters;
import minerful.reactive.params.JanusPrintParameters;
import org.apache.log4j.Logger;
import java.util.*;
/**
* Class to organize the variant analysis
*/
public class ReactiveVariantAnalysisCore {
protected static Logger logger;
private final LogParser logParser_1; // original log1 parser
private final ProcessModel processSpecification1; // original set of constraints mined from log1
private final LogParser logParser_2; // original log2 parser
private final ProcessModel processSpecification2; // original set of constraints mined from log2
private final JanusVariantCmdParameters janusVariantParams; // input parameter of the analysis
private final JanusPrintParameters janusViewParams; // print behaciours parameters
private float[][] lCodedIndex; // encoded log for efficient permutations. only constraints and traces indices are used
private int processSpecificationUnionSize; // number of constraints in the specification union
// RESULTS
private Map<String, Float> spec1; // constraint->log measure, measurement of the union model over the first variant
private Map<String, Float> spec2; // constraint->log measure, measurement of the union model over the second variant
//
public static final Map<String, String[]> HIERARCHY = new HashMap<String, String[]>() {{
put("Participation", new String[]{});
put("RespondedExistence", new String[]{"Participation($2)"}); //this link is ok only if the simplification works with equivalences of measures, otherwise it is not direct
put("CoExistence", new String[]{"RespondedExistence($1,$2)", "RespondedExistence($2,$1)"});
put("Succession", new String[]{"Response($1,$2)", "Precedence($1,$2)", "CoExistence($1,$2)"});
put("Precedence", new String[]{"RespondedExistence($2,$1)"});
put("Response", new String[]{"RespondedExistence($1,$2)"});
put("AlternateSuccession", new String[]{"AlternateResponse($1,$2)", "AlternatePrecedence($1,$2)", "Succession($1,$2)"});
put("AlternatePrecedence", new String[]{"Precedence($1,$2)"});
put("AlternateResponse", new String[]{"Response($1,$2)"});
put("ChainSuccession", new String[]{"ChainResponse($1,$2)", "ChainPrecedence($1,$2)", "AlternateSuccession($1,$2)"});
put("ChainPrecedence", new String[]{"AlternatePrecedence($1,$2)"});
put("ChainResponse", new String[]{"AlternateResponse($1,$2)"});
put("NotCoExistence", new String[]{});
put("NotSuccession", new String[]{"NotCoExistence($1,$2)"});
put("NotChainSuccession", new String[]{"NotSuccession($1,$2)"});
}}; // TODO only direct derivation for now, implement also simplification from combination of rules
private Map<Integer, String> indexToConstraintMap;
private Map<String, Integer> constraintToIndexMap;
private List<Integer> permutableTracesIndexList;
{
if (logger == null) {
logger = Logger.getLogger(ReactiveMeasurementsOfflineQueryingCore.class.getCanonicalName());
}
}
/**
* Constructor
*
* @param logParser_1
* @param logParser_2
* @param janusVariantParams
*/
public ReactiveVariantAnalysisCore(LogParser logParser_1, ProcessModel processSpecification1, LogParser logParser_2, ProcessModel processSpecification2, JanusVariantCmdParameters janusVariantParams, JanusPrintParameters janusViewParams) {
this.logParser_1 = logParser_1;
this.processSpecification1 = processSpecification1;
this.logParser_2 = logParser_2;
this.processSpecification2 = processSpecification2;
this.janusVariantParams = janusVariantParams;
this.janusViewParams = janusViewParams;
}
    /**
     * Launcher for variant analysis of two logs.
     * <p>
     * Pipeline: (1) build the union of the two process specifications,
     * (2) encode both logs into a trace-by-constraint measure matrix
     * ({@code encodeLogsIndex}), (3) run the permutation test
     * ({@code permuteResultsIndex}), (4) optionally adjust pValues for the
     * Multiple Testing problem.
     *
     * @return map from constraint name to its permutation-test pValue
     *         (constraints surviving the significance/adjustment filters,
     *         or all constraints when {@code oKeep} is set)
     */
    public Map<String, Float> check() {
        logger.info("Variant Analysis start");
        // PREPROCESSING
        double before = System.currentTimeMillis();
        // 1. Models differences
        // NOTE USED FOR NOW
        // setModelsDifferences(processSpecification1, processSpecification2);
        // 2. Models Union (total set of rules to check afterwards)
        // setModelsUnion(processSpecification1, processSpecification2);
        // total set of constraints to analyse, i.e., union of process specification 1 and 2
        ProcessModel processSpecificationUnion = ProcessModel.union(processSpecification1, processSpecification2);
        processSpecificationUnionSize = processSpecificationUnion.howManyConstraints();
        // 3. Encode log (create efficient log structure for the permutations)
        // 4. Precompute all possible results for the Encoded Log
        encodeLogsIndex(logParser_1, logParser_2, processSpecificationUnion);
        double after = System.currentTimeMillis();
        logger.info("Pre-processing time: " + (after - before));
        // PERMUTATION TEST
        before = System.currentTimeMillis();
        logger.info("Permutations processing...");
        int nPermutations;
        if (janusVariantParams.nPermutations <= 0) {
            // non-positive input: derive the number of permutations from the
            // pValue threshold so that the smallest observable pValue is below it
            nPermutations = (int) (processSpecificationUnionSize / janusVariantParams.pValue);
            logger.info("Number of required permutations: " + nPermutations);
            // TODO check that this number does not go beyond the possible permutations (unlikely, but theoretically possible)
        } else {
            nPermutations = janusVariantParams.nPermutations;
        }
        if (processSpecificationUnionSize / janusVariantParams.pValue > nPermutations) {
            // the smallest adjusted pValue is pValueThreshold/results.size(), thus the number of permutations must allow to reach such dimensions.
            // the worst case scenario is when all the hypotheses/constraints are statistically relevant
            logger.warn("Possible low number of iterations for a sound Multiple Testing adjustments! used:" + nPermutations + " safe upperbound expected:" + (int) (processSpecificationUnionSize / janusVariantParams.pValue));
        }
        Map<String, Float> results = permuteResultsIndex(nPermutations, true);
        after = System.currentTimeMillis();
        // POST-PROCESSING
        logger.info(" Permutations used:" + nPermutations + " minimum requirement for pValue adjustment:" + (int) (results.size() / janusVariantParams.pValue));
        if (janusVariantParams.pValueAdjustmentMethod != JanusVariantCmdParameters.PValueAdjustmentMethod.none) {
            if (results.size() / janusVariantParams.pValue > nPermutations) {
                // the smallest adjusted pValue is pValueThreshold/results.size(), thus the number of permutations must allow to reach such dimensions
                logger.warn("Not enough iterations for a sound Multiple Testing adjustments!");
            }
            pValueAdjustment(results, janusVariantParams.pValue, janusVariantParams.pValueAdjustmentMethod);
        }
        logger.info("Permutation test time: " + (after - before)); // note: timer stopped before post-processing
        return results;
    }
/**
* SIDE-EFFECT on result parameter!
* <p>
* A pValue correction method is applied on the result of the permutation test to mitigate the multiple testing problem.
* <p>
* The implemented methods are:
* HB: Holm–Bonferroni (no assumptions, controls the FWER, more strict)
* BH: Benjamini–Hochberg (independence or certain types of positive dependence, controls the FDR, more relaxed)
*
* @param results
* @param pValueThreshold
* @param method
*/
private void pValueAdjustment(Map<String, Float> results, double pValueThreshold, JanusVariantCmdParameters.PValueAdjustmentMethod method) {
logger.info("pValue adjustment using " + method + " correction method...");
int m = results.size();
// Sort results by pValue
TreeMultimap<Float, String> sortedPvaluesResults = TreeMultimap.create(Ordering.natural(), Ordering.usingToString());
for (String constraint : results.keySet()) {
sortedPvaluesResults.put(results.get(constraint), constraint);
}
// Compute rank
Map<Float, Integer> rankMap = new TreeMap();
int currentRank = 0;
for (float currentPvalue : sortedPvaluesResults.keySet()) {
for (String line : sortedPvaluesResults.get(currentPvalue)) {
currentRank++;
if (!rankMap.containsKey(currentPvalue)) {
rankMap.put(currentPvalue, currentRank);
}
}
}
int removed = 0;
boolean killSwitch = false;
switch (method) {
case hb:
// Holm-Bonferroni
for (float currentPvalue : sortedPvaluesResults.keySet()) {
if (killSwitch || currentPvalue >= pValueThreshold / (m + 1 - rankMap.get(currentPvalue))) {
killSwitch = true;
for (String constraint : sortedPvaluesResults.get(currentPvalue)) {
results.remove(constraint);
removed++;
}
}
}
break;
case bh:
// Benjamini–Hochberg
for (float currentPvalue : sortedPvaluesResults.keySet()) {
if (killSwitch || currentPvalue >= rankMap.get(currentPvalue) / (float) m * pValueThreshold) {
killSwitch = true;
for (String constraint : sortedPvaluesResults.get(currentPvalue)) {
results.remove(constraint);
removed++;
}
}
}
break;
default:
throw new IllegalArgumentException("Unknown method code! use HB (Holm–Bonferroni) or BH (Benjamini–Hochberg)");
}
logger.info("Removed " + removed + " of " + m + " results");
}
/**
* Permutation test in which is taken the encoded results and
* check of the significance of the permutation test results
*
* @param nPermutations
* @param nanCheck
* @return Map from constraint to its pValue
*/
private Map<String, Float> permuteResultsIndex(int nPermutations, boolean nanCheck) {
int nConstraints = processSpecificationUnionSize;
int log1Size = logParser_1.length();
int log2Size = logParser_2.length();
logger.info("[Tot traces:" + (log1Size + log2Size) + " Constraints:" + processSpecificationUnionSize + "]");
// List<Integer> permutableTracesIndexList = new ArrayList<>();
// for (Iterator<LogTraceParser> it = logParser_1.traceIterator(); it.hasNext(); ) {
// permutableTracesIndexList.add(traceToIndexMap.get(it.next().printStringTrace()));
// }
// for (Iterator<LogTraceParser> it = logParser_2.traceIterator(); it.hasNext(); ) {
// permutableTracesIndexList.add(traceToIndexMap.get(it.next().printStringTrace()));
// }
float[] pValues = new float[nConstraints];
Set<Integer> blackList = new HashSet();
float[] result1 = new float[nConstraints];
float[] result2 = new float[nConstraints];
float[] initialDifference = new float[nConstraints];
int step = 25;
for (int i = 0; i < nPermutations; i++) {
if (!janusViewParams.suppressPermutationStatusPrint && i % step == 0)
System.out.print("\rPermutation: " + i + "/" + nPermutations); // Status counter "current trace/total trace"
for (int c = 0; c < nConstraints; c++) {
if (!janusVariantParams.oKeep && blackList.contains(c))
continue;
int traceIndex = -1;
int nanTraces1 = 0;
int nanTraces2 = 0;
for (int t : permutableTracesIndexList) {
traceIndex++;
if (traceIndex < log1Size) {
if (nanCheck & Float.isNaN(lCodedIndex[t][c])) {
nanTraces1++;
continue; // TODO expose in input
}
result1[c] += lCodedIndex[t][c];
} else {
if (nanCheck & Float.isNaN(lCodedIndex[t][c])) {
nanTraces2++;
continue; // TODO expose in input
}
result2[c] += lCodedIndex[t][c];
}
}
result1[c] = result1[c] / (log1Size - nanTraces1);
result2[c] = result2[c] / (log2Size - nanTraces2);
if (i == 0) initialDifference[c] = Math.abs(result1[c] - result2[c]);
if (Math.abs(result1[c] - result2[c]) >= initialDifference[c]) {
pValues[c] += 1.0;
}
if (!janusVariantParams.oKeep && (pValues[c] / nPermutations) > janusVariantParams.pValue)
blackList.add(c); //if the constraints present a pValues greater than the threshold before the end of the permutations, we can discard it immediately
result1[c] = 0.0f;
result2[c] = 0.0f;
}
// permutation "0" are the original logs
Collections.shuffle(permutableTracesIndexList);
}
if (!janusViewParams.suppressPermutationStatusPrint) {
System.out.print("\rPermutation: " + nPermutations + "/" + nPermutations);
System.out.println();
}
// Significance test in case of NEGATIVE/POSITIVE DISTANCE
logger.info("Significance testing...");
Map<String, Float> result = new HashMap<String, Float>(); // constraint: pValue
for (int cIndex = 0; cIndex < nConstraints; cIndex++) {
pValues[cIndex] = pValues[cIndex] / nPermutations;
if (janusVariantParams.oKeep || pValues[cIndex] <= janusVariantParams.pValue) {
result.put(indexToConstraintMap.get(cIndex), pValues[cIndex]);
}
}
logger.info("Rules Number: " + nConstraints + " ; relevant: " + result.size() + " ; non-relevant: " + (nConstraints - result.size()));
return result;
}
/**
* Encode the input traces for efficient permutation.
* the result is a Map where the keys are the hash of the traces and the content in another map with key:value as constrain:measure.
* In this way we check only here the constraints in each trace and later we permute only the results
* <p>
* Transform the encoded map into a matrix where traces and constraints are referred by indices.
* compute the encoding and return the reference mappings
*
* @param model
* @param logParser_1
* @param logParser_2
*/
private void encodeLogsIndex(LogParser logParser_1, LogParser logParser_2, ProcessModel model) {
// encode index
lCodedIndex = new float[logParser_1.length() + logParser_2.length()][processSpecificationUnionSize]; // lCodedIndex[trace index][constraint index]
indexToConstraintMap = new HashMap<>();
constraintToIndexMap = new HashMap<>();
// encode
encodeLog(logParser_1, model, 0);
encodeLog(logParser_2, model, logParser_1.length());
Set<String> constraintsRemovalCandidate = new HashSet<>();
Set<String> constraintsList = constraintToIndexMap.keySet();
// hierarchical simplification
if (janusVariantParams.simplify) {
logger.info("Rules simplification...");
for (String c : constraintsList) {
if (constraintsRemovalCandidate.contains(c)) continue;
String template = c.split("\\(")[0];
// skip constraints with only one variable from simplification
if (c.contains(",") == false || HIERARCHY.get(template) == null) continue;
String cVar1 = c.split("\\(")[1].replace(")", "").split(",")[0];
String cVar2 = c.split("\\(")[1].replace(")", "").split(",")[1];
for (String d : HIERARCHY.get(template)) {
String derived = d.replace("$1", cVar1).replace("$2", cVar2);
if (constraintsList.contains(derived)) {
if (spec1.get(derived) - spec1.get(c) == 0 || spec2.get(derived) - spec2.get(c) == 0) {
constraintsRemovalCandidate.add(c);
}
}
}
}
logger.info("Number of simplified constraints: " + (processSpecificationUnionSize - constraintsRemovalCandidate.size()));
// simplification of symmetric constraints [CoExistence, NotCoExistence]
int initConstrNum = processSpecificationUnionSize - constraintsRemovalCandidate.size();
for (String c : constraintsList) {
if (constraintsRemovalCandidate.contains(c)) continue;
// skip constraints with only one variable from simplification
if (!c.contains(",")) continue;
String template = c.split("\\(")[0];
// only symmetric constraints
if (!template.equals("CoExistence") && !template.equals("NotCoExistence")) continue;
// skip constraints already labelled for removal
if (constraintsRemovalCandidate.contains(c)) continue;
String cVar1 = c.split("\\(")[1].replace(")", "").split(",")[0];
String cVar2 = c.split("\\(")[1].replace(")", "").split(",")[1];
String symmetricConstraint = template + "(" + cVar2 + "," + cVar1 + ")";
if (constraintsList.contains(symmetricConstraint)) constraintsRemovalCandidate.add(symmetricConstraint);
}
logger.info("Number of simplified symmetric constraints: " + (initConstrNum - (processSpecificationUnionSize - constraintsRemovalCandidate.size())));
}
// difference min cut
if (!janusVariantParams.oKeep) {
logger.info("Removing rules with not enough initial difference...");
int initConstrNum = processSpecificationUnionSize - constraintsRemovalCandidate.size();
for (String c : constraintsList) {
if (constraintsRemovalCandidate.contains(c)) continue;
float difference = Math.abs(spec1.get(c) - spec2.get(c));
// if one is NaN, the rule is removed if the non-NaN value is below the difference threshold (like if NaN=0)
if ((difference < janusVariantParams.differenceThreshold) ||
(Float.isNaN(spec1.get(c)) && spec2.get(c) < janusVariantParams.differenceThreshold) ||
(Float.isNaN(spec2.get(c)) && spec1.get(c) < janusVariantParams.differenceThreshold)) {
constraintsRemovalCandidate.add(c);
}
}
logger.info("Number of removed constraints: " + (initConstrNum - (processSpecificationUnionSize - constraintsRemovalCandidate.size())));
}
// Measures below threshold
if (janusVariantParams.measureThreshold > 0) {
logger.info("Removing rules below threshold in both variants...");
int initConstrNum = processSpecificationUnionSize - constraintsRemovalCandidate.size();
String[] initialConstraintsList = new String[spec1.size()];
spec1.keySet().toArray(initialConstraintsList);
for (String c : constraintsList) {
if (constraintsRemovalCandidate.contains(c)) continue;
if (spec1.get(c) < janusVariantParams.measureThreshold && spec2.get(c) < janusVariantParams.measureThreshold) {
constraintsRemovalCandidate.add(c);
}
}
logger.info("Number of removed constraints: " + (initConstrNum - (processSpecificationUnionSize - constraintsRemovalCandidate.size())));
}
// remove selected constraints and update the constraints indices map
Set<Integer> constraintsRemovalCandidateIndices = new HashSet<>();
for (String c : constraintsRemovalCandidate) {
spec1.remove(c);
spec2.remove(c);
constraintsRemovalCandidateIndices.add(constraintToIndexMap.get(c));
processSpecificationUnionSize--;
}
float[][] res = new float[lCodedIndex.length][lCodedIndex[0].length - constraintsRemovalCandidateIndices.size()];
for (int t = 0; t < lCodedIndex.length; t++) {
int i = 0;
for (int c = 0; c < lCodedIndex[t].length; c++) {
if (constraintsRemovalCandidateIndices.contains(c)) continue;
res[t][i] = lCodedIndex[t][c];
i++;
}
}
Map<Integer, String> newIndexToConstraintMap = new HashMap<>();
Map<String, Integer> newConstraintToindexMap = new HashMap<>();
int currentIndex = 0;
for (int c = 0; c < lCodedIndex[0].length; c++) {
if (constraintsRemovalCandidateIndices.contains(c)) continue;
newIndexToConstraintMap.put(currentIndex, indexToConstraintMap.get(c));
newConstraintToindexMap.put(indexToConstraintMap.get(c), currentIndex);
currentIndex++;
}
lCodedIndex = res;
indexToConstraintMap = newIndexToConstraintMap;
constraintToIndexMap = newConstraintToindexMap;
permutableTracesIndexList = new ArrayList<>();
Map<String, Integer> traceToIndexMap = new HashMap<>();
int currentTrace = 0;
for (Iterator<LogTraceParser> it = logParser_1.traceIterator(); it.hasNext(); ) {
LogTraceParser tr = it.next();
String stringTrace = tr.printStringTrace();
traceToIndexMap.put(stringTrace, currentTrace);
currentTrace++;
}
for (Iterator<LogTraceParser> it = logParser_2.traceIterator(); it.hasNext(); ) {
LogTraceParser tr = it.next();
String stringTrace = tr.printStringTrace();
traceToIndexMap.put(stringTrace, currentTrace);
currentTrace++;
}
for (Iterator<LogTraceParser> it = logParser_1.traceIterator(); it.hasNext(); ) {
permutableTracesIndexList.add(traceToIndexMap.get(it.next().printStringTrace()));
}
for (Iterator<LogTraceParser> it = logParser_2.traceIterator(); it.hasNext(); ) {
permutableTracesIndexList.add(traceToIndexMap.get(it.next().printStringTrace()));
}
}
    /**
     * Precomputes the evaluation of the model over one log and encodes it so
     * that each trace is linked to the measurements of all the constraints.
     *
     * Side effects on fields of the enclosing class:
     * - fills rows [resultIndex, resultIndex + logParser.length()) of lCodedIndex;
     * - extends constraintToIndexMap/indexToConstraintMap with any constraint
     *   not indexed yet (existing indices are kept stable);
     * - stores the log-level (average) measurements into spec1 when
     *   resultIndex == 0, into spec2 otherwise.
     *
     * @param logParser parser of the log to evaluate
     * @param model process model whose constraints are measured
     * @param resultIndex index from which to append the computed measures into the encoded result matrix
     */
    private void encodeLog(LogParser logParser, ProcessModel model, int resultIndex) {
        // Checker configuration; flag semantics are defined by JanusMeasurementsCmdParameters -- TODO confirm
        JanusMeasurementsCmdParameters janusCheckingParams = new JanusMeasurementsCmdParameters(false, 0, true, false);
        ReactiveMeasurementsOfflineQueryingCore reactiveMeasurementsOfflineQueryingCore = new ReactiveMeasurementsOfflineQueryingCore(
                0, logParser, janusCheckingParams, janusViewParams, null, logParser.getTaskCharArchive(), null, model.bag);
        double before = System.currentTimeMillis();
        MegaMatrixMonster measures = reactiveMeasurementsOfflineQueryingCore.check();
        // NOTE(review): the last constraint is deliberately excluded from encoding
        // and averaging (the "- 1") -- presumably an auxiliary entry; confirm.
        int constraintsNum = measures.getConstraintsNumber() - 1;
        double after = System.currentTimeMillis();
        logger.info("Total KB checking time: " + (after - before));
        // compute only the desired measure
        float[][] tracesMeasure = measures.retrieveSingleTraceMeasures(
                janusVariantParams.measure,
                janusCheckingParams.nanTraceSubstituteFlag,
                janusCheckingParams.nanTraceSubstituteValue);
        // constraints indices map: register any constraint not indexed yet
        int ic = constraintToIndexMap.size();
        for (String constraint : measures.getConstraintsNames()) {
            if (!constraintToIndexMap.containsKey(constraint)) {
                constraintToIndexMap.put(constraint, ic);
                indexToConstraintMap.put(ic, constraint);
                ic++;
            }
        }
        // fill result: copy each trace's measures into the shared matrix, row-offset by resultIndex
        for (int currentTrace = 0; currentTrace < logParser.length(); currentTrace++) {
            System.arraycopy(tracesMeasure[currentTrace], 0, lCodedIndex[currentTrace + resultIndex], 0, constraintsNum);
        }
        // save log measures: average each constraint's measure across this log's traces
        Map<String, Float> logMeasures = new HashMap<>(); // constraint->measurement
        boolean nanCheck = true;
        for (int c = 0; c < constraintsNum; c++) {
            int nanTraces = 0;
            float constraintResult = 0;
            for (int t = 0; t < logParser.length(); t++) {
                if (nanCheck & Float.isNaN(tracesMeasure[t][c])) {
                    nanTraces++; // NaN traces do not contribute to the average
                    continue; // TODO expose in input
                }
                constraintResult += tracesMeasure[t][c];
            }
            constraintResult = constraintResult / (logParser.length() - nanTraces);
            logMeasures.put(indexToConstraintMap.get(c), constraintResult);
        }
        // resultIndex > 0 means this call encoded the second variant's log
        if (resultIndex > 0) {
            spec2 = logMeasures;
        } else {
            spec1 = logMeasures;
        }
    }
/**
* Get the log level measurement of a given log parser using already encoded log measurements
* *
*
* @param nanCheck
* @return Map<String, Float> constraint-name:measurement
*/
private Map<String, Float> getMeasurementsOfOneVariant(boolean nanCheck, LogParser logParser, Map<String, Map<String, Float>> lCoded) {
Map<String, Float> result = new HashMap<>(); // constraint->measurement
int logSize = logParser.length();
List<String> permutableTracesList = new LinkedList<>();
for (Iterator<LogTraceParser> it = logParser.traceIterator(); it.hasNext(); ) {
permutableTracesList.add(it.next().printStringTrace());
}
Set<String> constraints = lCoded.values().iterator().next().keySet();
for (String c : constraints) {
int nanTraces = 0;
float constraintResult = 0;
for (String t : permutableTracesList) {
if (nanCheck & Float.isNaN(lCoded.get(t).get(c))) {
nanTraces++;
continue; // TODO expose in input
}
constraintResult += lCoded.get(t).get(c);
}
constraintResult = constraintResult / (logSize - nanTraces);
result.put(c, constraintResult);
}
return result;
}
    /**
     * Gets the original log-level measurement of the first variant.
     *
     * @param nanCheck unused here: the returned measurements were already
     *                 computed (with NaN traces skipped) during log encoding
     * @return Map<String, Float> constraint-name:measurement
     */
    public Map<String, Float> getMeasurementsVar1(boolean nanCheck) {
        return spec1;
    }
    /**
     * Gets the original log-level measurement of the second variant.
     *
     * @param nanCheck unused here: the returned measurements were already
     *                 computed (with NaN traces skipped) during log encoding
     * @return Map<String, Float> constraint-name:measurement
     */
    public Map<String, Float> getMeasurementsVar2(boolean nanCheck) {
        return spec2;
    }
}
| 28,138 | 47.183219 | 242 | java |
Janus | Janus-master/src/minerful/relevance/test/constraint/SequenceResponse21.java | package minerful.relevance.test.constraint;
import java.util.Set;
import java.util.TreeSet;
import dk.brics.automaton.State;
import dk.brics.automaton.Transition;
import minerful.automaton.concept.relevance.RelevanceAwareTransition;
import minerful.automaton.concept.relevance.TransitionRelevance;
import minerful.automaton.concept.relevance.VacuityAwareWildcardAutomaton;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily;
import minerful.concept.constraint.ConstraintFamily.ConstraintSubFamily;
import minerful.io.encdec.TaskCharEncoderDecoder;
public class SequenceResponse21 extends Constraint {
public SequenceResponse21(TaskChar param1, TaskChar param2, TaskChar param3) {
super();
this.parameters.add(new TaskCharSet(param1));
this.parameters.add(new TaskCharSet(param2));
this.parameters.add(new TaskCharSet(param3));
}
@Override
public String getRegularExpressionTemplate() {
return "([^%1$s]*)|([^%1$s]*%1$s[^%2$s]*)|([^%1$s]*%1$s[^%2$s]*%2$s[^%3$s]*%3$s[^%1$s]*)";
// "([^A]*)|([^A]*A[^B]*)|([^A]*A[^B]*B[^X]*X[^A]*)";
}
@Override
public String getRegularExpression() {
return String.format(this.getRegularExpressionTemplate(),
this.parameters.get(0).toPatternString(),
this.parameters.get(1).toPatternString(),
this.parameters.get(2).toPatternString()
);
}
@Override
public TaskCharSet getImplied() {
return null;
}
@Override
public Constraint suggestConstraintWhichThisShouldBeBasedUpon() {
return null;
}
@Override
public Constraint copy(TaskChar... taskChars) {
this.checkParams(taskChars);
return new SequenceResponse21(taskChars[0], taskChars[1], taskChars[2]);
}
@Override
public Constraint copy(TaskCharSet... taskCharSets) {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean checkParams(TaskChar... taskChars)
throws IllegalArgumentException {
return true;
}
@Override
public boolean checkParams(TaskCharSet... taskCharSets)
throws IllegalArgumentException {
return true;
}
@Override
public ConstraintFamily getFamily() {
// TODO Auto-generated method stub
return null;
}
@Override
public <T extends ConstraintSubFamily> T getSubFamily() {
// TODO Auto-generated method stub
return null;
}
@Override
public int compareTo(Constraint t) {
int result = super.compareTo(t);
if (result == 0) {
result = this.getClass().getCanonicalName().compareTo(t.getClass().getCanonicalName());
}
return result;
}
@Override
public boolean isBranched() {
return false;
}
@Override
public VacuityAwareWildcardAutomaton getCheckAutomaton() {
VacuityAwareWildcardAutomaton autom = new VacuityAwareWildcardAutomaton(this.toString(),
this.getRegularExpression(), TaskCharEncoderDecoder.getTranslationMap(this.getInvolvedTaskChars()));
// TODO Tweaking to insert the loopback to the initial state
State state = autom.getInitialState();
int i = 0;
for (; i < this.getParameters().size() - 1; i++) {
state = state.step(this.parameters.get(i).getFirstTaskChar().identifier);
}
State stateToRemove = state.step(this.parameters.get(i).getFirstTaskChar().identifier);
state.getTransitions().remove(new Transition(this.parameters.get(i).getFirstTaskChar().identifier, stateToRemove));
RelevanceAwareTransition newTransition = new RelevanceAwareTransition(this.parameters.get(i).getFirstTaskChar().identifier, autom.getInitialState(), this.parameters.get(i).getFirstTaskChar().taskClass.toString());
newTransition.setRelevance(TransitionRelevance.RELEVANT);
state.addTransition(newTransition);
return autom;
}
}
| 3,744 | 29.950413 | 215 | java |
Janus | Janus-master/src/minerful/relevance/test/constraint/SequenceResponse22.java | package minerful.relevance.test.constraint;
import java.util.Set;
import java.util.TreeSet;
import dk.brics.automaton.State;
import dk.brics.automaton.Transition;
import minerful.automaton.concept.relevance.RelevanceAwareTransition;
import minerful.automaton.concept.relevance.TransitionRelevance;
import minerful.automaton.concept.relevance.VacuityAwareWildcardAutomaton;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily;
import minerful.concept.constraint.ConstraintFamily.ConstraintSubFamily;
import minerful.io.encdec.TaskCharEncoderDecoder;
/**
 * Experimental constraint template over four tasks, used by the relevance
 * tests. The accepted language is defined by
 * {@link #getRegularExpressionTemplate()}; several {@link Constraint} hooks
 * are left as stubs.
 */
public class SequenceResponse22 extends Constraint {
    /** Stores the four tasks as single-task parameter sets, in positional order. */
    public SequenceResponse22(TaskChar param1, TaskChar param2, TaskChar param3, TaskChar param4) {
        super();
        this.parameters.add(new TaskCharSet(param1));
        this.parameters.add(new TaskCharSet(param2));
        this.parameters.add(new TaskCharSet(param3));
        this.parameters.add(new TaskCharSet(param4));
    }
    /** Template where %1$s..%4$s are the pattern strings of the four parameters. */
    @Override
    public String getRegularExpressionTemplate() {
        return "([^%1$s]*)|([^%1$s]*%1$s[^%2$s]*)|([^%1$s]*%1$s[^%2$s]*%2$s[^%3$s]*%3$s[^%4$s]*%4$s[^%1$s]*)";
        // "([^A]*)|([^A]*A[^B]*)|([^A]*A[^B]*B[^X]*X[^Y]*Y[^A]*)";
        // "([^A]*)|([^A]*A[^B]*)|([^A]*A[^B]*B*X[^Y]*Y[^A]*)"
    }
    /** Instantiates the template with this constraint's parameters. */
    @Override
    public String getRegularExpression() {
        return String.format(this.getRegularExpressionTemplate(),
                this.parameters.get(0).toPatternString(),
                this.parameters.get(1).toPatternString(),
                this.parameters.get(2).toPatternString(),
                this.parameters.get(3).toPatternString()
        );
    }
    /** Stub: no implied task set. */
    @Override
    public TaskCharSet getImplied() {
        return null;
    }
    /** Stub: no base constraint is suggested. */
    @Override
    public Constraint suggestConstraintWhichThisShouldBeBasedUpon() {
        return null;
    }
    /** Copies this constraint onto the given tasks (the first four are used). */
    @Override
    public Constraint copy(TaskChar... taskChars) {
        this.checkParams(taskChars);
        return new SequenceResponse22(taskChars[0], taskChars[1], taskChars[2], taskChars[3]);
    }
    /** Stub: branched (task-set) copy is not supported and yields null. */
    @Override
    public Constraint copy(TaskCharSet... taskCharSets) {
        // TODO Auto-generated method stub
        return null;
    }
    // NOTE(review): both checkParams overloads accept any input, so copy(TaskChar...)
    // can fail with ArrayIndexOutOfBoundsException when fewer than 4 tasks are given.
    @Override
    public boolean checkParams(TaskChar... taskChars)
            throws IllegalArgumentException {
        return true;
    }
    @Override
    public boolean checkParams(TaskCharSet... taskCharSets)
            throws IllegalArgumentException {
        return true;
    }
    /** Stub: no constraint family assigned. */
    @Override
    public ConstraintFamily getFamily() {
        // TODO Auto-generated method stub
        return null;
    }
    /** Stub: no constraint sub-family assigned. */
    @Override
    public <T extends ConstraintSubFamily> T getSubFamily() {
        // TODO Auto-generated method stub
        return null;
    }
    /** Orders as {@link Constraint} does, breaking ties by concrete class name. */
    @Override
    public int compareTo(Constraint t) {
        int result = super.compareTo(t);
        if (result == 0) {
            result = this.getClass().getCanonicalName().compareTo(t.getClass().getCanonicalName());
        }
        return result;
    }
    /** This template only binds single tasks, hence it is never branched. */
    @Override
    public boolean isBranched() {
        return false;
    }
    /**
     * Builds the vacuity-aware checking automaton from the regular expression,
     * then rewires it: the transition consumed by the last parameter is
     * replaced with a RELEVANT transition looping back to the initial state.
     */
    @Override
    public VacuityAwareWildcardAutomaton getCheckAutomaton() {
        VacuityAwareWildcardAutomaton autom = new VacuityAwareWildcardAutomaton(this.toString(),
                this.getRegularExpression(), TaskCharEncoderDecoder.getTranslationMap(this.getInvolvedTaskChars()));
        // TODO Tweaking to insert the loopback to the initial state
        // Walk the automaton along the identifiers of all parameters but the last
        State state = autom.getInitialState();
        int i = 0;
        for (; i < this.getParameters().size() - 1; i++) {
            state = state.step(this.parameters.get(i).getFirstTaskChar().identifier);
        }
        // Replace the last parameter's outgoing transition with a RELEVANT one
        // that targets the initial state
        State stateToRemove = state.step(this.parameters.get(i).getFirstTaskChar().identifier);
        state.getTransitions().remove(new Transition(this.parameters.get(i).getFirstTaskChar().identifier, stateToRemove));
        RelevanceAwareTransition newTransition = new RelevanceAwareTransition(this.parameters.get(i).getFirstTaskChar().identifier, autom.getInitialState(), this.parameters.get(i).getFirstTaskChar().taskClass.toString());
        newTransition.setRelevance(TransitionRelevance.RELEVANT);
        state.addTransition(newTransition);
        return autom;
    }
}
| 3,946 | 30.830645 | 215 | java |
Janus | Janus-master/src/minerful/relevance/test/constraint/SequenceResponse32.java | package minerful.relevance.test.constraint;
import java.util.Set;
import java.util.TreeSet;
import dk.brics.automaton.State;
import dk.brics.automaton.Transition;
import minerful.automaton.concept.relevance.RelevanceAwareTransition;
import minerful.automaton.concept.relevance.TransitionRelevance;
import minerful.automaton.concept.relevance.VacuityAwareWildcardAutomaton;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily;
import minerful.concept.constraint.ConstraintFamily.ConstraintSubFamily;
import minerful.io.encdec.TaskCharEncoderDecoder;
/**
 * Experimental constraint template over five tasks, used by the relevance
 * tests. The accepted language is defined by
 * {@link #getRegularExpressionTemplate()}; several {@link Constraint} hooks
 * are left as stubs.
 */
public class SequenceResponse32 extends Constraint {
    /** Stores the five tasks as single-task parameter sets, in positional order. */
    public SequenceResponse32(TaskChar param1, TaskChar param2, TaskChar param3, TaskChar param4, TaskChar param5) {
        super();
        this.parameters.add(new TaskCharSet(param1));
        this.parameters.add(new TaskCharSet(param2));
        this.parameters.add(new TaskCharSet(param3));
        this.parameters.add(new TaskCharSet(param4));
        this.parameters.add(new TaskCharSet(param5));
    }
    /** Template where %1$s..%5$s are the pattern strings of the five parameters. */
    @Override
    public String getRegularExpressionTemplate() {
        return "([^%1$s]*)|([^%1$s]*%1$s[^%2$s]*)|([^%1$s]*%1$s[^%2$s]*%2$s[^%3$s]*)|([^%1$s]*%1$s[^%2$s]*%2$s[^%3$s]*%3$s[^%4$s]*%4$s[^%5$s]*%5$s[^%1$s]*)";
        // "([^A]*)|([^A]*A[^B]*)|([^A]*A[^B]*B[^C]*)|([^A]*A[^B]*B[^C]*C[^X]*X[^Y]*Y[^A]*)";
    }
    /** Instantiates the template with this constraint's parameters. */
    @Override
    public String getRegularExpression() {
        return String.format(this.getRegularExpressionTemplate(),
                this.parameters.get(0).toPatternString(),
                this.parameters.get(1).toPatternString(),
                this.parameters.get(2).toPatternString(),
                this.parameters.get(3).toPatternString(),
                this.parameters.get(4).toPatternString()
        );
    }
    /** Stub: no implied task set. */
    @Override
    public TaskCharSet getImplied() {
        return null;
    }
    /** Stub: no base constraint is suggested. */
    @Override
    public Constraint suggestConstraintWhichThisShouldBeBasedUpon() {
        return null;
    }
    /** Copies this constraint onto the given tasks (the first five are used). */
    @Override
    public Constraint copy(TaskChar... taskChars) {
        this.checkParams(taskChars);
        return new SequenceResponse32(taskChars[0], taskChars[1], taskChars[2], taskChars[3], taskChars[4]);
    }
    /** Stub: branched (task-set) copy is not supported and yields null. */
    @Override
    public Constraint copy(TaskCharSet... taskCharSets) {
        // TODO Auto-generated method stub
        return null;
    }
    // NOTE(review): both checkParams overloads accept any input, so copy(TaskChar...)
    // can fail with ArrayIndexOutOfBoundsException when fewer than 5 tasks are given.
    @Override
    public boolean checkParams(TaskChar... taskChars)
            throws IllegalArgumentException {
        return true;
    }
    @Override
    public boolean checkParams(TaskCharSet... taskCharSets)
            throws IllegalArgumentException {
        return true;
    }
    /** Stub: no constraint family assigned. */
    @Override
    public ConstraintFamily getFamily() {
        // TODO Auto-generated method stub
        return null;
    }
    /** Stub: no constraint sub-family assigned. */
    @Override
    public <T extends ConstraintSubFamily> T getSubFamily() {
        // TODO Auto-generated method stub
        return null;
    }
    /** Orders as {@link Constraint} does, breaking ties by concrete class name. */
    @Override
    public int compareTo(Constraint t) {
        int result = super.compareTo(t);
        if (result == 0) {
            result = this.getClass().getCanonicalName().compareTo(t.getClass().getCanonicalName());
        }
        return result;
    }
    /** This template only binds single tasks, hence it is never branched. */
    @Override
    public boolean isBranched() {
        return false;
    }
    /**
     * Builds the vacuity-aware checking automaton from the regular expression,
     * then rewires it: the transition consumed by the last parameter is
     * replaced with a RELEVANT transition looping back to the initial state.
     */
    @Override
    public VacuityAwareWildcardAutomaton getCheckAutomaton() {
        VacuityAwareWildcardAutomaton autom = new VacuityAwareWildcardAutomaton(this.toString(),
                this.getRegularExpression(), TaskCharEncoderDecoder.getTranslationMap(this.getInvolvedTaskChars()));
        // TODO Tweaking to insert the loopback to the initial state
        // Walk the automaton along the identifiers of all parameters but the last
        State state = autom.getInitialState();
        int i = 0;
        for (; i < this.getParameters().size() - 1; i++) {
            state = state.step(this.parameters.get(i).getFirstTaskChar().identifier);
        }
        // Replace the last parameter's outgoing transition with a RELEVANT one
        // that targets the initial state
        State stateToRemove = state.step(this.parameters.get(i).getFirstTaskChar().identifier);
        state.getTransitions().remove(new Transition(this.parameters.get(i).getFirstTaskChar().identifier, stateToRemove));
        RelevanceAwareTransition newTransition = new RelevanceAwareTransition(this.parameters.get(i).getFirstTaskChar().identifier, autom.getInitialState(), this.parameters.get(i).getFirstTaskChar().taskClass.toString());
        newTransition.setRelevance(TransitionRelevance.RELEVANT);
        state.addTransition(newTransition);
        return autom;
    }
}
| 4,085 | 31.688 | 215 | java |
Janus | Janus-master/src/minerful/stringsmaker/MinerFulStringTracesMaker.java | package minerful.stringsmaker;
import java.io.FileWriter;
import java.io.IOException;
import minerful.io.encdec.log.IOutEncoder;
import minerful.io.encdec.log.MxmlEncoder;
import minerful.io.encdec.log.XesEncoder;
import minerful.stringsmaker.params.StringTracesMakerCmdParameters;
import minerful.utils.MessagePrinter;
import nl.flotsam.xeger.Xeger;
import org.apache.log4j.Logger;
@Deprecated
public class MinerFulStringTracesMaker {
private static Logger logger = Logger.getLogger(MinerFulStringTracesMaker.class.getCanonicalName());
public String[] makeTraces(StringTracesMakerCmdParameters params) {
String regexp = "(" + params.regexps[0] + ")";
Double avgChrsPerString = 0.0;
long totalChrs = 0L;
// building the intersection of the regular expressions
for (int i = 1; i < params.regexps.length; i++) {
regexp += "&(" + params.regexps[i] + ")";
}
// limiting the vocabulary
String regexpLimitingTheVocabulary = "";
for (Character s : params.alphabet) {
regexpLimitingTheVocabulary += s;
}
regexp = "([" + regexpLimitingTheVocabulary + "]*)&" + regexp;
// limiting the number of characters per string
if (params.isMinChrsPerStringGiven() || params.isMaxChrsPerStringGiven()) {
regexp =
regexp +
"&(.{" +
( params.isMinChrsPerStringGiven()
? params.printMinChrsPerString()
: "0"
) +
"," +
( params.isMaxChrsPerStringGiven()
? params.printMaxChrsPerString()
: ""
) +
"})";
}
// generating random strings
Xeger generator = new Xeger(regexp);
String[] testBedArray = new String[params.size.intValue()];
int zeroPaddingCharsAmount = (int)(Math.ceil(Math.log10(testBedArray.length)));
if (zeroPaddingCharsAmount < 1)
zeroPaddingCharsAmount = 1;
for (int i = 0; i < testBedArray.length; i++) {
testBedArray[i] = generator.generate();
totalChrs += testBedArray[i].length();
logger.trace(String.format("%0" + zeroPaddingCharsAmount + "d", (i)) + ")\t" + testBedArray[i]);
}
avgChrsPerString = 1.0 * totalChrs / params.size;
logger.trace(
"\n"
+ "[Testbed]"
+ (
"\n\n"
+ "Regular expression(s) generating the proofs: " + params.printRegExps() + "\n"
+ "(extended: " + regexp + ")\n"
+ "conjunction of " + params.getNumberOfConstraints() + " constraint(s)" + "\n"
+ "over " + params.size + " cases" + "\n"
+ "(length of strings ranging from " + params.printMinChrsPerString()
+ " to " + params.printMaxChrsPerString() + ")\n"
+ "(average length of strings: " + avgChrsPerString + ")\n"
+ "with the alphabet: " + params.printAlphabet()
).replaceAll("\n", "\n\t")
);
if (store(params, testBedArray) && params.logFile != null) {
logger.info("Log file stored in: " + params.logFile.getAbsolutePath());
}
return testBedArray;
}
public boolean store(StringTracesMakerCmdParameters params, String[] traces) {
// saving
IOutEncoder outEnco = null;
switch (params.outputEncoding) {
case xes:
outEnco = new XesEncoder(traces);
break;
case mxml:
outEnco = new MxmlEncoder(traces);
break;
default:
break;
}
if (outEnco != null) {
try {
if (params.logFile != null) {
outEnco.encodeToFile(params.logFile);
} else {
MessagePrinter.printlnOut(outEnco.encodeToString());
System.out.flush();
}
} catch (IOException e) {
logger.error("Encoding error", e);
return false;
}
} else {
FileWriter fileWri = null;
if (params.logFile != null) {
try {
fileWri = new FileWriter(params.logFile);
} catch (IOException e) {
logger.error("File writing error", e);
return false;
}
if (traces.length > 0) {
StringBuffer tracesBuffer = new StringBuffer();
for (int i = 0; i < traces.length; i++) {
tracesBuffer.append(traces[i] + "\n");
}
try {
fileWri.write(tracesBuffer.toString());
fileWri.flush();
} catch (IOException e) {
logger.error("File writing error", e);
return false;
}
}
}
}
return true;
}
} | 5,070 | 33.263514 | 109 | java |
Janus | Janus-master/src/minerful/stringsmaker/params/StringTracesMakerCmdParameters.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package minerful.stringsmaker.params;
import java.io.File;
import minerful.io.encdec.TaskCharEncoderDecoder;
import minerful.logmaker.params.LogMakerParameters;
import minerful.params.ParamsManager;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
public class StringTracesMakerCmdParameters extends ParamsManager {
public static final String OUTPUT_FILE_PARAM_NAME = "oLF";
public static final String OUT_ENC_PARAM_NAME = "oE";
public static final char SIZE_PARAM_NAME = 'L';
public static final char MAX_LEN_PARAM_NAME = 'M';
public static final char MIN_LEN_PARAM_NAME = 'm';
public static final char ALPHABET_PARAM_NAME = 'a';
public static final char REG_EXPS_PARAM_NAME = 'r';
public static final String ALPHABET_CHARACTERS_SEPARATOR = ":";
public static final Character[] TEST_ALPHABET =
{'n', 'p', 'r', 'c'};
public static final String TEST_REGEXP =
// "[bcdef]*((a[acdef]*b)|(b[bcdef]*a))+[bcdef]*";
// "[bc]*((a[ac]*b)|(b[bc]*a))+[b]*";
"("
+
// "[prc]*(n[prc]*)+[prc]*" + // Participation(n)
// ")&(" +
// "[prc]*(n)?[prc]*" + // Once(n)
// ")&(" +
// "[nprc]*n" + // End(n)
// ")&(" +
"[rc]*(p[nprc]*n)*[rc]*" + // Succession(p, n)
// ")&(" +
// "[npc]*(r[nprc]p)*[npc]*" + // Response(r, p)
// ")&(" +
// "[npr]*((c[nprc]*p)|(p[nprc]*c))*[npr]*" + // RespondedExistence(c, p)
// "[npr]*(r[npr]*c)*[npr]*" + // AlternatePrecedence(r, c); ^[^s]*(r[^s]*s)*[^s]*$
// "n[nrc]+[nprc]+c" +
")";
public static final Long DEFAULT_SIZE = 100L;
public static final Integer DEFAULT_MIN_TRACE_LENGTH = 0;
public static final Integer DEFAULT_MAX_TRACE_LENGTH = Integer.MAX_VALUE;
public String[] regexps;
public Character[] alphabet;
public Integer minChrsPerString;
public Integer maxChrsPerString;
public Long size;
public File logFile;
public LogMakerParameters.Encoding outputEncoding;
public StringTracesMakerCmdParameters() {
super();
regexps = new String[]{TEST_REGEXP};
alphabet = TEST_ALPHABET;
minChrsPerString = DEFAULT_MIN_TRACE_LENGTH;
maxChrsPerString = DEFAULT_MAX_TRACE_LENGTH;
size = DEFAULT_SIZE;
logFile = null;
outputEncoding = LogMakerParameters.Encoding.strings;
}
public StringTracesMakerCmdParameters(Options options, String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(options, args);
}
public StringTracesMakerCmdParameters(String[] args) {
this();
// parse the command line arguments
this.parseAndSetup(new Options(), args);
}
@Override
protected void setup(CommandLine line) {
// validate that block-size has been set
this.regexps = line.getOptionValues(StringTracesMakerCmdParameters.REG_EXPS_PARAM_NAME);
if (this.regexps == null)
this.regexps = new String[]{TEST_REGEXP};
if (line.getOptionValues(StringTracesMakerCmdParameters.ALPHABET_PARAM_NAME) != null) {
this.alphabet = TaskCharEncoderDecoder.faultyEncode(line.getOptionValue(StringTracesMakerCmdParameters.ALPHABET_PARAM_NAME).toString().split(ALPHABET_CHARACTERS_SEPARATOR));
}
this.minChrsPerString =
Integer.valueOf(
line.getOptionValue(StringTracesMakerCmdParameters.MIN_LEN_PARAM_NAME,this.minChrsPerString.toString()));
this.maxChrsPerString =
Integer.valueOf(
line.getOptionValue(StringTracesMakerCmdParameters.MAX_LEN_PARAM_NAME, this.maxChrsPerString.toString()));
this.size =
Long.valueOf(line.getOptionValue(StringTracesMakerCmdParameters.SIZE_PARAM_NAME, this.size.toString()));
this.outputEncoding = Enum.valueOf(
LogMakerParameters.Encoding.class,
line.getOptionValue(OUT_ENC_PARAM_NAME, this.outputEncoding.toString())
);
this.logFile = openOutputFile(line, OUTPUT_FILE_PARAM_NAME);
}
@Override
public Options addParseableOptions(Options options) {
Options myOptions = listParseableOptions();
for (Object myOpt: myOptions.getOptions())
options.addOption((Option)myOpt);
return options;
}
@Override
public Options listParseableOptions() {
return parseableOptions();
}
@SuppressWarnings("static-access")
public static Options parseableOptions() {
Options options = new Options();
options.addOption(
Option.builder(String.valueOf(StringTracesMakerCmdParameters.REG_EXPS_PARAM_NAME))
.hasArgs().argName("reg exp list")
.longOpt("regexp")
.desc("unbound regular expressions list generating the strings (in conjunction)")
.type(String.class)
.build()
);
options.addOption(
Option.builder(String.valueOf(StringTracesMakerCmdParameters.ALPHABET_PARAM_NAME))
.hasArg().argName("alphabet")
.longOpt("alphabet")
.desc("\"" + ALPHABET_CHARACTERS_SEPARATOR + "\"-separated list of characters in the alphabet (e.g., a:b:c)")
.build()
);
options.addOption(
Option.builder(String.valueOf(StringTracesMakerCmdParameters.MIN_LEN_PARAM_NAME))
.hasArg().argName("min_length")
.longOpt("minlen")
.desc("minimum length of the generated strings")
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(String.valueOf(StringTracesMakerCmdParameters.MAX_LEN_PARAM_NAME))
.hasArg().argName("max_length")
.longOpt("maxlen")
.desc("maximum length of the generated strings")
.type(Integer.class)
.build()
);
options.addOption(
Option.builder(String.valueOf(StringTracesMakerCmdParameters.SIZE_PARAM_NAME))
.hasArg().argName("number of strings")
.longOpt("size")
.desc("number of strings to run on")
.type(Double.class)
.build()
);
options.addOption(
Option.builder(StringTracesMakerCmdParameters.OUTPUT_FILE_PARAM_NAME)
.hasArg().argName("file path")
.longOpt("out-log")
.desc("path to the file to write the log in")
.type(String.class)
.build()
);
options.addOption(
Option.builder(StringTracesMakerCmdParameters.OUT_ENC_PARAM_NAME)
.hasArg().argName("encoding")
.longOpt("out-enc")
.desc("encoding language for output log " + printValues(LogMakerParameters.Encoding.values()))
.type(String.class)
.build()
);
return options;
}
public int getNumberOfConstraints() {
return this.regexps.length;
}
public String printAlphabet() {
StringBuilder alphabetStringBuffer = new StringBuilder();
alphabetStringBuffer.append("{");
for (Character chr: this.alphabet) {
alphabetStringBuffer.append(chr);
alphabetStringBuffer.append(", ");
}
return alphabetStringBuffer.substring(0, alphabetStringBuffer.length() -2) + "}";
}
public String printMaxChrsPerString() {
return (this.isMaxChrsPerStringGiven() ? String.valueOf(this.maxChrsPerString) : "*");
}
public String printMinChrsPerString() {
return (this.isMinChrsPerStringGiven() ? String.valueOf(this.minChrsPerString) : "0");
}
public String printRegExps() {
StringBuffer regExpsStringBuffer = new StringBuffer();
regExpsStringBuffer.append("\n{");
for (String re: this.regexps) {
regExpsStringBuffer.append("\n\t");
regExpsStringBuffer.append(re);
regExpsStringBuffer.append(",");
}
return regExpsStringBuffer.substring(0, regExpsStringBuffer.length() -1) + "\n}";
}
public boolean isMinChrsPerStringGiven() {
return (this.minChrsPerString > 0);
}
public boolean isMaxChrsPerStringGiven() {
return (this.maxChrsPerString < Integer.MAX_VALUE);
}
} | 8,980 | 39.822727 | 185 | java |
Janus | Janus-master/src/minerful/utils/MessagePrinter.java | package minerful.utils;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Properties;
import java.util.StringTokenizer;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.LogMF;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import minerful.index.comparator.modular.ConstraintSortingPolicy;
import minerful.params.SystemCmdParameters;
public class MessagePrinter {
Logger logger = null;
static DecimalFormat df = new DecimalFormat("0");
static { df.setMaximumFractionDigits(16); }
public static final String ARRAY_TOKENISER_SEPARATOR = ":";
public static void configureLogging(SystemCmdParameters.DebugLevel debugLevel) {
String threshold = "ALL";
switch (debugLevel) {
case none:
threshold = "INFO";
break;
case info:
threshold = "INFO";
break;
case all:
threshold = "ALL";
break;
case trace:
threshold = "TRACE";
break;
case debug:
threshold = "DEBUG";
break;
default:
break;
}
Properties debugProperties = new Properties();
debugProperties.setProperty("log4j.rootLogger", threshold + ", A1");
debugProperties.setProperty("log4j.appender.A1", "org.apache.log4j.ConsoleAppender");
debugProperties.setProperty("log4j.appender.A1.Threshold", threshold);
debugProperties.setProperty("log4j.appender.A1.layout", "org.apache.log4j.PatternLayout");
debugProperties.setProperty("log4j.appender.A1.layout.ConversionPattern", "%p [%t] %c{2} (%M:%L) - %m%n");
PropertyConfigurator.configure(debugProperties);
}
protected MessagePrinter(Class<?> invokingClass) {
this.logger = Logger.getLogger(invokingClass);
}
protected MessagePrinter(Object invokingObject) {
this(invokingObject.getClass());
}
public static MessagePrinter getInstance(Object source) {
return new MessagePrinter(source.getClass());
}
public static MessagePrinter getInstance(Class<?> invokingClass) {
return new MessagePrinter(invokingClass);
}
public static String printValues(Object... values) {
StringBuilder valuesStringBuilder = new StringBuilder();
if (values.length > 1) {
valuesStringBuilder.append("{");
}
for (int i = 0; i < values.length; i++) {
valuesStringBuilder.append("'");
valuesStringBuilder.append(fromEnumValueToString(values[i]));
valuesStringBuilder.append("'");
if (i < values.length -1) {
valuesStringBuilder.append(",");
}
}
if (values.length > 1) {
valuesStringBuilder.append("}");
}
return valuesStringBuilder.toString();
}
public static String[] tokenise(String arrayStringOfTokens) {
if (arrayStringOfTokens == null)
return null;
StringTokenizer strTok = new StringTokenizer(arrayStringOfTokens, ARRAY_TOKENISER_SEPARATOR);
String[] tokens = new String[strTok.countTokens()];
int i = 0;
while (strTok.hasMoreTokens())
tokens[i++] = strTok.nextToken();
return tokens;
}
public static String fromEnumValuesToTokenJoinedString(Object... tokens) {
String[] tokenStrings = new String[tokens.length];
int i = 0;
for (Object token : tokens) {
tokenStrings[i++] = fromEnumValueToString(token);
}
return StringUtils.join(tokenStrings, ARRAY_TOKENISER_SEPARATOR);
}
public static String fromEnumValueToString(Object token) {
return token.toString().trim().toLowerCase().replace("_", "-");
}
public static void printlnOut(String s) {
System.out.println(s);
}
public static void printlnOut() {
System.out.println();
}
public static void printOut(String s) {
System.out.print(s);
}
public static void printlnError(String s) {
System.err.println(s);
}
public static String formatFloatNumForCSV(float num) {
return df.format(num);
}
public static String formatFloatNumForCSV(double num) {
return df.format(num);
}
public void info(String message) {
this.logger.info(message);
}
public void info(String pattern, Object... params) {
LogMF.info(this.logger, pattern, params);
}
public void warn(String message) {
this.logger.warn(message);
}
public void warn(String pattern, Object... params) {
LogMF.warn(this.logger, pattern, params);
}
public void debug(String message) {
this.logger.debug(message);
}
public void debug(String pattern, Object... params) {
LogMF.debug(this.logger, pattern, params);
}
public void trace(String message) {
this.logger.trace(message);
}
public void trace(String pattern, Object... params) {
LogMF.trace(this.logger, pattern, params);
}
public void error(String message) {
this.logger.error(message);
}
public void error(String pattern, Object... params) {
LogMF.error(this.logger, pattern, params);
}
public void error(String message, Throwable e) {
this.logger.error(message, e);
}
public void error(String pattern, Throwable e, Object... params) {
LogMF.error(this.logger, e, pattern, params);
}
public void fatal(String message) {
this.logger.fatal(message);
}
public void fatal(String pattern, Object... params) {
LogMF.fatal(this.logger, pattern, params);
}
public void fatal(String message, Throwable e) {
this.logger.fatal(message, e);
}
public void fatal(String pattern, Throwable e, Object... params) {
LogMF.fatal(this.logger, e, pattern, params);
}
} | 5,524 | 28.388298 | 114 | java |
Janus | Janus-master/src/minerful/utils/MinerFulLogStatsPrinter.java | package minerful.utils;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import minerful.AbstractMinerFulStarter;
import minerful.MinerFulMinerLauncher;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.logparser.LogParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.InputLogCmdParameters;
import minerful.params.SystemCmdParameters;
public class MinerFulLogStatsPrinter extends AbstractMinerFulStarter {
    /** Printer used for the progress/diagnostic messages of this utility. */
    private static MessagePrinter logger = MessagePrinter.getInstance(MinerFulLogStatsPrinter.class);
@Override
public Options setupOptions() {
Options cmdLineOptions = new Options();
Options inputOptions = InputLogCmdParameters.parseableOptions(),
systemOptions = SystemCmdParameters.parseableOptions();
for (Object opt: inputOptions.getOptions()) {
cmdLineOptions.addOption((Option)opt);
}
for (Object opt: systemOptions.getOptions()) {
cmdLineOptions.addOption((Option)opt);
}
return cmdLineOptions;
}
/**
* @param args
* the command line arguments: [regular expression] [number of
* strings] [minimum number of characters per string] [maximum
* number of characters per string] [alphabet]...
*/
public static void main(String[] args) {
MinerFulLogStatsPrinter minerMinaStarter = new MinerFulLogStatsPrinter();
Options cmdLineOptions = minerMinaStarter.setupOptions();
InputLogCmdParameters inputParams =
new InputLogCmdParameters(
cmdLineOptions,
args);
MinerFulCmdParameters minerFulParams =
new MinerFulCmdParameters(
cmdLineOptions,
args);
SystemCmdParameters systemParams =
new SystemCmdParameters(
cmdLineOptions,
args);
if (systemParams.help) {
systemParams.printHelp(cmdLineOptions);
System.exit(0);
}
if (!isEventLogGiven(cmdLineOptions, inputParams, systemParams)) {
System.exit(1);
}
MessagePrinter.configureLogging(systemParams.debugLevel);
logger.info("Loading log...");
LogParser logParser = MinerFulMinerLauncher.deriveLogParserFromLogFile(
inputParams,
minerFulParams);
TaskCharArchive taskCharArchive = logParser.getTaskCharArchive();
MessagePrinter.printlnOut("Log file: " + inputParams.inputLogFile);
MessagePrinter.printlnOut("Number of traces: " + logParser.length());
MessagePrinter.printlnOut("Numer of events: " + logParser.numberOfEvents());
MessagePrinter.printlnOut("Minimum trace length: " + logParser.minimumTraceLength());
MessagePrinter.printlnOut("Maximum trace length: " + logParser.maximumTraceLength());
MessagePrinter.printlnOut("Event classifier: " + inputParams.eventClassification);
MessagePrinter.printlnOut("Event classes (raw): " + taskCharArchive);
MessagePrinter.printlnOut("Event classes (list): " + taskCharArchive.getTaskChars());
}
public static boolean isEventLogGiven(Options cmdLineOptions, InputLogCmdParameters inputParams,
SystemCmdParameters systemParams) {
if (inputParams.inputLogFile == null) {
systemParams.printHelpForWrongUsage("Input log file missing! Please use the " +
InputLogCmdParameters.INPUT_LOGFILE_PATH_PARAM_NAME +
" option.",
cmdLineOptions);
return false;
}
return true;
}
} | 3,310 | 32.785714 | 98 | java |
// Janus-master/src/minerful/utils/RandomCharGenerator.java
package minerful.utils;
import java.security.SecureRandom;
import java.util.Base64;

import com.thoughtworks.xstream.core.util.Base64Encoder;
public class RandomCharGenerator {
	/**
	 * Generates a random string made exclusively of word characters
	 * ([A-Za-z0-9_]) by repeatedly Base64-encoding {@code bytearraysize}
	 * random bytes until the encoding contains no '+', '/' or '=' character.
	 *
	 * The encoding is done with the standard-library encoder and without
	 * padding: with the previously used padded encoder, any size not
	 * divisible by 3 yielded trailing '=' characters, so the retry loop
	 * could never terminate.
	 *
	 * @param bytearraysize number of random bytes to draw (must be positive)
	 * @return a random string of word characters only
	 * @throws IllegalArgumentException if {@code bytearraysize} is not positive
	 */
	public static String generateChar(int bytearraysize){
		if (bytearraysize <= 0) {
			// With zero (or fewer) bytes the encoding is always empty, so the
			// loop below would never terminate.
			throw new IllegalArgumentException("bytearraysize must be positive: " + bytearraysize);
		}
		SecureRandom sr = new SecureRandom();
		byte[] randomBytes = new byte[bytearraysize];
		String randChar = "+";
		// Re-draw until the Base64 encoding happens to contain only word characters.
		while (!randChar.matches("\\w+")) {
			sr.nextBytes(randomBytes);
			randChar = Base64.getEncoder().withoutPadding().encodeToString(randomBytes);
		}
		return randChar;
	}
}
// Janus-master/src/minerful/utils/ResourceReader.java
package minerful.utils;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
public class ResourceReader {
	/**
	 * Reads the whole content of a classpath resource into a string,
	 * appending a newline after every line read.
	 *
	 * The reader is now closed via try-with-resources (it was previously
	 * leaked). On a read error, the partial content read so far is returned,
	 * as before.
	 *
	 * @param resourcePath classpath-relative path of the resource to read
	 * @return the textual content of the resource
	 */
	public static final String readResource(String resourcePath) {
		StringBuilder sBuilder = new StringBuilder();
		try (BufferedReader buReader = new BufferedReader(
				new InputStreamReader(
						loadResource(resourcePath)
				)
		)) {
			String inLine = buReader.readLine();
			while (inLine != null) {
				sBuilder.append(inLine);
				sBuilder.append("\n");
				inLine = buReader.readLine();
			}
		} catch (IOException e) {
			// Best effort: keep the previous behaviour of returning whatever
			// was read before the failure.
			e.printStackTrace();
		}
		return sBuilder.toString();
	}

	/**
	 * Opens a classpath resource through this class's class loader.
	 *
	 * @return an input stream on the resource, or {@code null} if not found
	 */
	public static final InputStream loadResource(String resourcePath) {
		return ResourceReader.class.getClassLoader().getResourceAsStream(resourcePath);
	}

	/**
	 * Opens a resource from an external library, loaded via a dedicated
	 * {@link URLClassLoader}.
	 *
	 * NOTE(review): the created class loader is never closed and the current
	 * thread's context class loader is permanently replaced; the returned
	 * stream depends on that loader staying open, so do not "fix" this
	 * without auditing the call sites.
	 *
	 * @param libraryUrl URL of the library (e.g. a jar) to load from
	 * @param resourcePath path of the resource within the library
	 * @param classLoaderProvider class whose loader is used as parent
	 * @return an input stream on the resource, or {@code null} if not found
	 */
	public static final InputStream loadResource(String libraryUrl, String resourcePath, Class<?> classLoaderProvider) {
		URL url = null;
		try {
			url = new URL(libraryUrl);
		} catch (MalformedURLException e1) {
			e1.printStackTrace();
		}
		URL[] urls = {url};
		URLClassLoader classLoader = new URLClassLoader(urls, classLoaderProvider.getClassLoader());
		Thread.currentThread().setContextClassLoader(classLoader);
		return Thread.currentThread().getContextClassLoader().getResourceAsStream(resourcePath);
	}
}
// Janus-master/src/minerful/utils/XesLogTracesSorterLauncher.java
package minerful.utils;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import org.deckfour.xes.in.XesXmlGZIPParser;
import org.deckfour.xes.in.XesXmlParser;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.out.XesXmlSerializer;
import minerful.logmaker.XesLogTracesSorter;
import minerful.logmaker.params.XesLogSorterParameters;
/**
* Launches the sorting of XES event logs.
*/
public class XesLogTracesSorterLauncher {
public static MessagePrinter logger = MessagePrinter.getInstance(XesLogTracesSorterLauncher.class);
private XesLogSorterParameters xeSortParams;
private XesXmlParser parser = null;
public XesLogTracesSorterLauncher(XesLogSorterParameters xeSortParams) {
this.xeSortParams = xeSortParams;
}
public void sortAndStoreXesLog() {
logger.info("Loading the XES log from %s ...", xeSortParams.inputXesFile);
XLog xLog = null;
try {
xLog = readXLog(xeSortParams);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
logger.info("Sorting the XES log...");
XesLogTracesSorter trSort = new XesLogTracesSorter(xeSortParams.tracesSortingCriteria);
XLog nuXLog = trSort.sortXesLog(xLog);
logger.info("Saving the XES log on %s...", xeSortParams.outputXesFile);
trSort.renameEventLog(nuXLog);
try {
storeSortedXesLog(nuXLog);
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private void storeSortedXesLog(XLog nuXLog) throws IOException, FileNotFoundException {
new XesXmlSerializer().serialize(nuXLog, new FileOutputStream(xeSortParams.outputXesFile));
}
private XLog readXLog(XesLogSorterParameters xeSortParams) throws Exception {
this.parser = new XesXmlParser();
if (!parser.canParse(xeSortParams.inputXesFile)) {
parser = new XesXmlGZIPParser();
if (!parser.canParse(xeSortParams.inputXesFile)) {
throw new IllegalArgumentException(
"Unparsable log file: " + xeSortParams.inputXesFile.getAbsolutePath());
}
}
return parser.parse(xeSortParams.inputXesFile).get(0);
}
} | 2,254 | 32.161765 | 100 | java |
// Janus-master/src/minerful/utils/XesLogTracesSorterStarter.java
package minerful.utils;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import minerful.AbstractMinerFulStarter;
import minerful.logmaker.params.XesLogSorterParameters;
import minerful.params.SystemCmdParameters;
public class XesLogTracesSorterStarter extends AbstractMinerFulStarter {
private static MessagePrinter logger = MessagePrinter.getInstance(XesLogTracesSorterStarter.class);
@Override
public Options setupOptions() {
Options cmdLineOptions = new Options();
Options xesLogSorterOptions = XesLogSorterParameters.parseableOptions(),
systemOptions = SystemCmdParameters.parseableOptions();
for (Object opt: xesLogSorterOptions.getOptions()) {
cmdLineOptions.addOption((Option)opt);
}
for (Object opt: systemOptions.getOptions()) {
cmdLineOptions.addOption((Option)opt);
}
return cmdLineOptions;
}
/**
* @param args
* the command line arguments: [regular expression] [number of
* strings] [minimum number of characters per string] [maximum
* number of characters per string] [alphabet]...
*/
public static void main(String[] args) {
XesLogTracesSorterStarter xesTraceStorter = new XesLogTracesSorterStarter();
Options cmdLineOptions = xesTraceStorter.setupOptions();
XesLogSorterParameters xesLogSorterParams =
new XesLogSorterParameters(
cmdLineOptions,
args);
SystemCmdParameters systemParams =
new SystemCmdParameters(
cmdLineOptions,
args);
if (systemParams.help) {
systemParams.printHelp(cmdLineOptions);
System.exit(0);
}
if (!areEventLogsProvided(cmdLineOptions, xesLogSorterParams, systemParams)) {
System.exit(1);
}
MessagePrinter.configureLogging(systemParams.debugLevel);
if (!areEventLogsProvided(cmdLineOptions, xesLogSorterParams, systemParams)) {
System.exit(1);
}
XesLogTracesSorterLauncher xeSorter = new XesLogTracesSorterLauncher(xesLogSorterParams);
xeSorter.sortAndStoreXesLog();
}
public static boolean areEventLogsProvided(Options cmdLineOptions, XesLogSorterParameters xesSortParams,
SystemCmdParameters systemParams) {
if (xesSortParams.inputXesFile == null) {
systemParams.printHelpForWrongUsage("Input XES log file missing! Please use the " +
XesLogSorterParameters.INPUT_XES_PARAM_NAME +
" option.",
cmdLineOptions);
return false;
}
if (xesSortParams.outputXesFile == null) {
systemParams.printHelpForWrongUsage("Output XES log file missing! Please use the " +
XesLogSorterParameters.OUTPUT_XES_PARAM_NAME +
" option.",
cmdLineOptions);
return false;
}
return true;
}
} | 2,673 | 30.458824 | 105 | java |
// Janus-master/src/trashbin/minerful/MinerFulSimuSubCheckStarter.java
package trashbin.minerful;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import minerful.MinerFulOutputManagementLauncher;
import minerful.MinerFulSimuStarter;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSet;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintFamily;
import minerful.concept.constraint.ConstraintFamily.RelationConstraintSubFamily;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.index.LinearConstraintsIndexFactory;
import minerful.io.params.OutputModelParameters;
import minerful.logparser.LogEventClassifier.ClassificationType;
import minerful.logparser.LogParser;
import minerful.logparser.StringLogParser;
import minerful.miner.params.MinerFulCmdParameters;
import minerful.params.SystemCmdParameters;
import minerful.params.ViewCmdParameters;
import minerful.postprocessing.params.PostProcessingCmdParameters;
import minerful.postprocessing.pruning.SubsumptionCheckSummaryMaker;
import minerful.stringsmaker.MinerFulStringTracesMaker;
import minerful.stringsmaker.params.StringTracesMakerCmdParameters;
import minerful.utils.MessagePrinter;
import org.apache.commons.cli.Options;
public class MinerFulSimuSubCheckStarter extends MinerFulSimuStarter {
	/** Message printer available for this class's own log output. */
	public static MessagePrinter logger = MessagePrinter.getInstance(MinerFulSimuSubCheckStarter.class);
	/**
	 * Generates a synthetic trace set, mines a declarative process model out
	 * of it, and prints (as CSV) a subsumption-check summary computed over
	 * the mined single-activation relation constraints.
	 *
	 * @param args the command-line arguments; see
	 *        {@link MinerFulSimuStarter#setupOptions()}
	 */
	public static void main(String[] args) {
		MinerFulSimuStarter minerSimuStarter = new MinerFulSimuStarter();
		Options cmdLineOptions = minerSimuStarter.setupOptions();

		// All parameter groups are parsed out of the same command line.
		ViewCmdParameters viewParams =
				new ViewCmdParameters(
						cmdLineOptions,
						args);
		StringTracesMakerCmdParameters tracesMakParams =
				new StringTracesMakerCmdParameters(
						cmdLineOptions,
						args);
		MinerFulCmdParameters minerFulParams =
				new MinerFulCmdParameters(
						cmdLineOptions,
						args);
		SystemCmdParameters systemParams =
				new SystemCmdParameters(
						cmdLineOptions,
						args);
		OutputModelParameters outParams =
				new OutputModelParameters(
						cmdLineOptions,
						args);
		PostProcessingCmdParameters postParams =
				new PostProcessingCmdParameters(
						cmdLineOptions,
						args);

		if (systemParams.help) {
			systemParams.printHelp(cmdLineOptions);
			System.exit(0);
		}

		MessagePrinter.configureLogging(systemParams.debugLevel);

		// Generate the synthetic test-bed traces out of the given parameters.
		String[] testBedArray = new String[0];
		testBedArray = new MinerFulStringTracesMaker().makeTraces(tracesMakParams);

		try {
			LogParser stringLogParser = new StringLogParser(testBedArray, ClassificationType.NAME);
			TaskCharArchive taskCharArchive = new TaskCharArchive(stringLogParser.getEventEncoderDecoder().getTranslationMap());

			// Mine the model from the generated traces and emit the output views.
			ProcessModel processModel = minerSimuStarter.mine(stringLogParser, minerFulParams, postParams, taskCharArchive);

			MinerFulOutputManagementLauncher proViewStarter = new MinerFulOutputManagementLauncher();
			proViewStarter.manageOutput(processModel, viewParams, outParams, systemParams, stringLogParser);

			// Hard-coded reference model, listed below, used to set up the
			// subsumption-check summary:
			/*
			AlternateResponse(a, {b,c})
			ChainPrecedence({a,b}, c)
			Precedence({a,b,c,d}, e)
			RespondedExistence(a, {b,c,d,e})
			Response(a, {b,c})
			ChainPrecedence({a,b,d}, c)
			*/
			TaskChar
				a = taskCharArchive.getTaskChar('A'),
				b = taskCharArchive.getTaskChar('B'),
				c = taskCharArchive.getTaskChar('C'),
				d = taskCharArchive.getTaskChar('D'),
				e = taskCharArchive.getTaskChar('E');
			Constraint[] model = new Constraint[]{
					new AlternateResponse(new TaskCharSet(a), new TaskCharSet(Arrays.asList(new TaskChar[]{b,c}))),
					new ChainPrecedence(new TaskCharSet(Arrays.asList(new TaskChar[]{a,b})), new TaskCharSet(c)),
					new Precedence(new TaskCharSet(Arrays.asList(new TaskChar[]{a,b,c,d})), new TaskCharSet(e)),
					new RespondedExistence(new TaskCharSet(a), new TaskCharSet(Arrays.asList(new TaskChar[]{b,c,d,e}))),
					new Response(new TaskCharSet(a), new TaskCharSet(Arrays.asList(new TaskChar[]{b,c}))),
					new ChainPrecedence(new TaskCharSet(Arrays.asList(new TaskChar[]{a,b,d})), new TaskCharSet(c)),
			};
			SubsumptionCheckSummaryMaker suChe = new SubsumptionCheckSummaryMaker(model);
			Collection<Constraint> cns = LinearConstraintsIndexFactory.getAllConstraints(processModel.bag);
			// Leave out all non-relation constraints
			Iterator<Constraint> cnsIt = cns.iterator();
			while (cnsIt.hasNext()) {
				Constraint current = cnsIt.next();
				// Keep only single-activation relation constraints.
				if (!current.getFamily().equals(ConstraintFamily.RELATION) || !current.getSubFamily().equals(RelationConstraintSubFamily.SINGLE_ACTIVATION)) {
					cnsIt.remove();
				}
			}
			suChe.checkSubsumption(cns);

			// Print the subsumption-check summary as CSV.
			MessagePrinter.printlnOut(suChe.csv());
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			System.exit(1);
		}
	}
}
// Janus-master/src/trashbin/minerful/MinerFulVacuityChecker2.java
package trashbin.minerful;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import minerful.checking.ConstraintsFitnessEvaluator;
import minerful.checking.ProcessSpecificationFitnessEvaluator;
import minerful.checking.relevance.dao.ConstraintsFitnessEvaluationsMap;
import minerful.checking.relevance.dao.ModelFitnessEvaluation;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharFactory;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.existence.AtMostOne;
import minerful.concept.constraint.existence.End;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.NotChainSuccession;
import minerful.concept.constraint.relation.NotCoExistence;
import minerful.concept.constraint.relation.NotSuccession;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.io.encdec.declaremap.DeclareMapEncoderDecoder;
import minerful.io.encdec.declaremap.DeclareMapReaderWriter;
import minerful.logparser.LogEventClassifier.ClassificationType;
import minerful.logparser.LogParser;
import minerful.logparser.LogTraceParser;
import minerful.logparser.StringLogParser;
import minerful.logparser.XesLogParser;
import minerful.params.SystemCmdParameters.DebugLevel;
import minerful.utils.MessagePrinter;
public class MinerFulVacuityChecker2 {
	/** Message printer used for this class's own log output. */
	public static MessagePrinter logger = MessagePrinter.getInstance(MinerFulVacuityChecker2.class);

	/**
	 * Task place-holders to be used as parameters for the constraint templates to check.
	 */
	public static TaskChar
		a = new TaskChar('A'),
		b = new TaskChar('B'),
		c = new TaskChar('C'),
		x = new TaskChar('X'),
		y = new TaskChar('Y');

	/**
	 * Constraint templates to be checked.
	 */
	// Toggle the comment to add/remove the template from the set of checked ones.
	public static Constraint[] parametricConstraints =
		new Constraint[]{
//			new SequenceResponse21(a,b,x),
//			new SequenceResponse22(a,b,x,y),
//			new SequenceResponse32(a,b,c,x,y),
			new Participation(a), // a.k.a. Existence(1, a)
			new AtMostOne(a), // a.k.a. Absence(2, a)
			new Init(a),
			new End(a),
			new RespondedExistence(a,b),
			new RespondedExistence(x,y),
			new Response(a, b),
			new AlternateResponse(a,b),
			new ChainResponse(a,b),
			new Precedence(a,b),
			new AlternatePrecedence(a,b),
			new ChainPrecedence(a,b),
			new CoExistence(a,b),
			new Succession(a,b),
			new AlternateSuccession(a, b),
			new ChainSuccession(a, b),
			new NotChainSuccession(a, b),
			new NotSuccession(a, b),
			new NotCoExistence(a, b),
		};
	/**
	 * Prints a usage/warning banner and waits for a key press, loads the log
	 * given as first argument (as XES first, falling back to a plain string
	 * log), builds a hard-coded test specification, evaluates its fitness on
	 * the first two traces of the log, prints the evaluations as CSV and --
	 * if a third argument is given -- stores the fitting standard constraints
	 * as a Declare map XML file.
	 *
	 * @param args [0] log file path; [1] fitness threshold; [2] optional
	 *        Declare-map output file path
	 * @throws Exception if reading the log or writing the output fails
	 */
	public static void main(String[] args) throws Exception {
		System.err.println(
				"#### WARNING"
				+ "\n" +
				"This class is not yet part of the MINERful framework. It is meant to be the proof-of-concept software for the paper entitled "
				+ "\"On the Relevance of a Business Constraint to an Event Log\", authored by C. Di Ciccio, F.M. Maggi, M. Montali, and J. Mendling (DOI: https://doi.org/10.1016/j.is.2018.01.011). "
				+ "Please use it for testing purposes only."
				+ "\n\n" +
				"#### USAGE"
				+ "\n" +
				"Usage: java " + MinerFulVacuityChecker2.class.getCanonicalName() + " <XES-log-file-path> [threshold] [Declare-map-output-file-path]."
				+ "\n" +
				"Param: <XES-log-file-path>: the path to a XES event log file (mandatory)"
				+ "\n" +
				"Param: [threshold]: the ratio of traces in which the constraints have to be non-vacuously satisfied, from 0.0 to 1.0 (default: " + ConstraintsFitnessEvaluator.DEFAULT_FITNESS_THRESHOLD + ") (optional)"
				+ "\n" +
				"Param: [Declare-map-output-file-path]: the path of the file in which the returned constraints are stored as a Declare Map XML file (by default, no Declare Map XML file is saved) (optional)"
				+ "\n\n" +
				"#### OUTPUT"
				+ "\n" +
				"To customise the constraint templates to be checked, please change the code of this class (" + MinerFulVacuityChecker2.class.getCanonicalName() + ") in the specified point and recompile."
				+ "\n" +
				"The printed output is a CSV-encoding of constraints that are non-vacuously satisfied in the given log. The output can be also saved as a Declare Map XML file by specifying the third optional command parameter (for standard Declare constraints only) -- see above: [Declare-map-output-file-path]."
				+ "\n\n" +
				"Press any key to continue..."
		);
		System.in.read();

		MessagePrinter.configureLogging(DebugLevel.all);

		// Try parsing the input as a XES log first; fall back to a
		// plain-string log if the XES parser rejects it.
		LogParser loPar = null;
		try {
			loPar = new XesLogParser(new File(args[0]), ClassificationType.LOG_SPECIFIED);
		} catch (Exception e) {
			MessagePrinter.printlnOut(args[0] + " is not an XES file");
			loPar = new StringLogParser(new File(args[0]), ClassificationType.NAME);
		}

		TaskCharFactory tChFactory = new TaskCharFactory();

		// Hard-coded task names taken from well-known benchmark logs.
		TaskChar
			accAssgnd = tChFactory.makeTaskChar("Accepted+Assigned"),
			accInProg = tChFactory.makeTaskChar("Accepted+In Progress"),
			accptWait = tChFactory.makeTaskChar("Accepted+Wait"),
			complClos = tChFactory.makeTaskChar("Completed+Closed"),
			qAwaAssgn = tChFactory.makeTaskChar("Queued+Awaiting Assignment"),
			nabellOff = tChFactory.makeTaskChar("W_Nabellen offertes"),
			oocreated = tChFactory.makeTaskChar("O_CREATED");

		// The constraints actually checked below (task chars re-created inline).
		Constraint[] toBeChecked = new Constraint[] {
				new Response(
						tChFactory.makeTaskChar("Accepted+In Progress"),
						tChFactory.makeTaskChar("Completed+Closed")),
				new Response(
						tChFactory.makeTaskChar("Queued+Awaiting Assignment"),
						tChFactory.makeTaskChar("Completed+Closed")),
				new CoExistence(
						tChFactory.makeTaskChar("W_Nabellen offertes"),
						tChFactory.makeTaskChar("O_CREATED")),
				new AlternatePrecedence(
						tChFactory.makeTaskChar("Accepted+In Progress"),
						tChFactory.makeTaskChar("Completed+Closed")),
				new NotChainSuccession(
						tChFactory.makeTaskChar("Completed+Closed"),
						tChFactory.makeTaskChar("Accepted+Assigned")),
				new NotChainSuccession(
						tChFactory.makeTaskChar("Completed+Closed"),
						tChFactory.makeTaskChar("Accepted+Wait")),
				new RespondedExistence(
						tChFactory.makeTaskChar("Accepted+Assigned"),
						tChFactory.makeTaskChar("Accepted+In Progress")),
				new RespondedExistence(
						tChFactory.makeTaskChar("Accepted+Wait"),
						tChFactory.makeTaskChar("Accepted+In Progress")),
		};

		// Same constraint set, built out of the pre-made task chars above;
		// only referenced by the commented-out evaluator variant below.
		Constraint[] toBeCheckock = new Constraint[] {
				new Response(
						accInProg,
						complClos),
				new Response(
						qAwaAssgn,
						complClos),
				new CoExistence(
						nabellOff,
						oocreated),
				new AlternatePrecedence(
						accInProg,
						complClos),
				new NotChainSuccession(
						complClos,
						accAssgnd),
				new NotChainSuccession(
						complClos,
						accptWait),
				new RespondedExistence(
						accAssgnd,
						accInProg),
				new RespondedExistence(
						accptWait,
						accInProg),
		};

		ConstraintsBag bag = new ConstraintsBag();
		for (Constraint con : toBeChecked) {
			bag.add(con);
		}
		ProcessModel checkSpec = new ProcessModel(bag, "Test spec");

		ProcessSpecificationFitnessEvaluator relEvalor =
				new ProcessSpecificationFitnessEvaluator(loPar.getEventEncoderDecoder(),checkSpec);
		/*
		ConstraintsFitnessEvaluator relEvalor =
				new ConstraintsFitnessEvaluator(
						loPar.getEventEncoderDecoder(),
						toBeCheckock);
//						loPar.getTaskCharArchive(),
//						Arrays.asList(parametricConstraints));
		*/
		ConstraintsFitnessEvaluationsMap evalon = null;
		ModelFitnessEvaluation fitEval = null;

		// NOTE(review): only the first two traces of the log are consumed
		// here; the whole-log evaluation below is commented out.
		Iterator<LogTraceParser> trItator = loPar.traceIterator();
		evalon = relEvalor.runOnTrace(trItator.next());
		fitEval = relEvalor.evaluateOnTrace(trItator.next());
		/*
		if (args.length > 1) {
			evalon = relEvalor.runOnLog(loPar,Double.valueOf(args[1]));
		} else {
			evalon = relEvalor.runOnLog(loPar);
		}
		*/
		MessagePrinter.printlnOut(evalon.printCSV());
		MessagePrinter.printlnOut(fitEval.printCSV());

		if (args.length > 2) {
			logger.debug("Storing constraints as a Declare map on " + args[2]);
			Collection<Constraint> nuStandardConstraints = new ArrayList<Constraint>();
			// args[1] is guaranteed to exist here because args.length > 2.
			Double fitnessThreshold = Double.valueOf(args[1]);
			for (Constraint con : relEvalor.getCheckedConstraints()) {
				// Keep only standard (family-bearing) constraints meeting the threshold.
				if (con.getFamily() != null && con.getFitness() >= fitnessThreshold) {
					nuStandardConstraints.add(con);
				}
			}
			ConstraintsBag coBag = new ConstraintsBag(loPar.getTaskCharArchive().getTaskChars(), nuStandardConstraints);
			ProcessModel model = new ProcessModel(loPar.getTaskCharArchive(), coBag);
			DeclareMapReaderWriter.marshal(args[2], new DeclareMapEncoderDecoder(model).createDeclareMap());
			logger.debug("Done.");
		}
	}
}
// Janus-master/src/trashbin/minerful/concept/AbandonedHeuristicsForAutomatonMaking.java
package trashbin.minerful.concept;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import minerful.automaton.AutomatonFactory;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.TaskCharArchive;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.ConstraintsBag;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.index.LinearConstraintsIndexFactory;
import dk.brics.automaton.Automaton;
/**
 * Collection of abandoned heuristics for building the global automaton of a
 * declarative process model out of the regular expressions of its
 * constraints. Kept in the trash bin for reference purposes.
 */
public class AbandonedHeuristicsForAutomatonMaking {
	/**
	 * Builds one sub-automaton per task (out of all constraints in which the
	 * task occurs as implying or implied parameter) and then intersects the
	 * sub-automata, in the order given by the number of connections of each
	 * task.
	 *
	 * @param model the process model whose constraints are turned into automata
	 * @return the intersection automaton, or {@code null} for an empty index
	 */
	public static Automaton buildAutomatonByBoundHeuristicAppliedTwiceInMultiThreading(ProcessModel model) {
		Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> map =
				LinearConstraintsIndexFactory.indexByImplyingAndImplied(model.bag);
		List<TaskChar> taskCharsSortedByNumberOfConnections =
				LinearConstraintsIndexFactory.getTaskCharsSortedByNumberOfConnections(
						LinearConstraintsIndexFactory.createMapOfConnections(map));
		Collection<Constraint> constraints = null;
		Collection<String> regularExpressions = null;
		AbstractMap<TaskChar, Automaton> subAutomata = new TreeMap<TaskChar, Automaton>();
		Map<TaskChar, NavigableSet<Constraint>>
			subMap = null,
			subMapReverse = null;
		Automaton processAutomaton = null;
		Set<TaskChar>
			taskChars = new TreeSet<TaskChar>(map.keySet()),
			taskCharsReverse = new TreeSet<TaskChar>(map.keySet());
		for (TaskChar tCh : taskChars) {
			subMap = map.get(tCh);
			constraints = new ArrayList<Constraint>();
			for (TaskChar tChRev : taskCharsReverse) {
				// Collect the constraints in both directions (tCh -> tChRev and
				// tChRev -> tCh); each consumed entry is nulled out so that the
				// symmetric visit does not add the same constraints twice.
				if (subMap.containsKey(tChRev) && subMap.get(tChRev) != null && subMap.get(tChRev).size() > 0) {
					constraints.addAll(subMap.get(tChRev));
					subMap.put(tChRev, null);
				}
				if (map.containsKey(tChRev)) {
					subMapReverse = map.get(tChRev);
					if (subMapReverse.containsKey(tCh) && subMapReverse.get(tCh) != null && subMapReverse.get(tCh).size() > 0) {
						constraints.addAll(subMapReverse.get(tCh));
						subMapReverse.put(tCh, null);
					}
				}
			}
			// Turn the collected constraints into regular expressions and
			// build the per-task sub-automaton.
			regularExpressions = new ArrayList<String>(constraints.size());
			for (Constraint con : constraints) {
				regularExpressions.add(con.getRegularExpression());
			}
			subAutomata.put(tCh, AutomatonFactory.fromRegularExpressions(regularExpressions, model.getTaskCharArchive().getIdentifiersAlphabet()));
		}
		// Intersect the sub-automata following the connection-count order.
		for (TaskChar tCh : taskCharsSortedByNumberOfConnections) {
			if (processAutomaton == null) {
				processAutomaton = subAutomata.get(tCh);
			} else {
				processAutomaton = processAutomaton.intersection(subAutomata.get(tCh));
			}
		}
		return processAutomaton;
	}

	/**
	 * Groups the constraints' regular expressions by the identifier of the
	 * task they revolve around (as implying or implied parameter) and
	 * delegates the automaton construction to
	 * {@link AutomatonFactory#fromRegularExpressionsByDimensionalityHeuristicInMultiThreading}.
	 *
	 * @param model the process model whose constraints are turned into automata
	 * @return the automaton built out of the indexed regular expressions
	 */
	public static Automaton buildAutomatonByBoundAndDimensionalityHeuristicInMultiThreading(ProcessModel model) {
		Map<TaskChar, Map<TaskChar, NavigableSet<Constraint>>> map =
				LinearConstraintsIndexFactory.indexByImplyingAndImplied(model.bag);
		Collection<Constraint> constraints = null;
		Collection<String> regularExpressions = null;
		AbstractMap<Character, Collection<String>> indexedRegExps = new TreeMap<Character, Collection<String>>();
		Map<TaskChar, NavigableSet<Constraint>>
			subMap = null,
			subMapReverse = null;
		Set<TaskChar>
			taskChars = new TreeSet<TaskChar>(map.keySet()),
			taskCharsReverse = new TreeSet<TaskChar>(map.keySet());
		for (TaskChar tCh : taskChars) {
			subMap = map.get(tCh);
			constraints = new ArrayList<Constraint>();
			for (TaskChar tChRev : taskCharsReverse) {
				// Same both-directions collection with entry-nulling as in
				// buildAutomatonByBoundHeuristicAppliedTwiceInMultiThreading.
				if (subMap.containsKey(tChRev) && subMap.get(tChRev) != null && subMap.get(tChRev).size() > 0) {
					constraints.addAll(subMap.get(tChRev));
					subMap.put(tChRev, null);
				}
				if (map.containsKey(tChRev)) {
					subMapReverse = map.get(tChRev);
					if (subMapReverse.containsKey(tCh) && subMapReverse.get(tCh) != null && subMapReverse.get(tCh).size() > 0) {
						constraints.addAll(subMapReverse.get(tCh));
						subMapReverse.put(tCh, null);
					}
				}
			}
			regularExpressions = new ArrayList<String>(constraints.size());
			for (Constraint con : constraints) {
				regularExpressions.add(con.getRegularExpression());
			}
			indexedRegExps.put(tCh.identifier, regularExpressions);
		}
		return AutomatonFactory.fromRegularExpressionsByDimensionalityHeuristicInMultiThreading(indexedRegExps, model.getTaskCharArchive().getIdentifiersAlphabet());
	}

	/**
	 * Builds the automaton out of all constraints' regular expressions,
	 * feeding them to the factory in strictness order.
	 *
	 * @param model the process model whose constraints are turned into automata
	 * @return the automaton accepting the intersection of all expressions
	 */
	public static Automaton buildAutomatonByStrictnessHeuristic(ProcessModel model) {
		SortedSet<Constraint> constraintsSortedByStrictness = LinearConstraintsIndexFactory.getAllConstraintsSortedByStrictness(model.bag);
		List<String> regularExpressions = new ArrayList<String>(constraintsSortedByStrictness.size());
		for (Constraint con : constraintsSortedByStrictness) {
			regularExpressions.add(con.getRegularExpression());
		}
		return AutomatonFactory.fromRegularExpressions(regularExpressions, model.getTaskCharArchive().getIdentifiersAlphabet());
	}

	/**
	 * Generates a process model containing every discoverable existence
	 * constraint for each task and every discoverable relation constraint for
	 * each unordered task pair (in both directions), with hierarchical links
	 * established among them. No constraint is evaluated against a log.
	 *
	 * @param taskCharArchive the alphabet of tasks to combine
	 * @return the generated (non-evaluated) process model
	 */
	public static ProcessModel generateNonEvaluatedBinaryModel(TaskCharArchive taskCharArchive) {
		ProcessModel proMod = null;

		Iterator<TaskChar>
			actIter = taskCharArchive.getTaskChars().iterator(),
			auxActIter = null;
		TaskChar
			auxActiParam1 = null,
			auxActiParam2 = null;
		Collection<Constraint>
			conSet = new TreeSet<Constraint>(),
			auxConSet = null;
		// Shrinks while iterating so that each unordered pair is visited once.
		Collection<TaskChar> activitiesLeftToCombine = new TreeSet<TaskChar>(taskCharArchive.getTaskChars());

		while (actIter.hasNext()) {
			auxActiParam1 = actIter.next();

			auxConSet = MetaConstraintUtils.getAllDiscoverableExistenceConstraints(auxActiParam1);
			auxConSet = MetaConstraintUtils.createHierarchicalLinks(auxConSet);

			conSet.addAll(auxConSet);

			activitiesLeftToCombine.remove(auxActiParam1);

			auxActIter = activitiesLeftToCombine.iterator();

			auxConSet = new TreeSet<Constraint>();

			while (auxActIter.hasNext()) {
				auxActiParam2 = auxActIter.next();

				// Relation constraints are generated in both directions.
				auxConSet = MetaConstraintUtils.getAllDiscoverableRelationConstraints(auxActiParam1, auxActiParam2);
				auxConSet.addAll(MetaConstraintUtils.getAllDiscoverableRelationConstraints(auxActiParam2, auxActiParam1));

				auxConSet = MetaConstraintUtils.createHierarchicalLinks(auxConSet);
				conSet.addAll(auxConSet);
			}
		}
		ConstraintsBag bag = new ConstraintsBag(taskCharArchive.getTaskChars(), conSet);
		proMod = new ProcessModel(taskCharArchive, bag);

		return proMod;
	}

	/**
	 * Groups the regular expressions of each task's constraints by the task's
	 * identifier and delegates the automaton construction to the
	 * dimensionality-heuristic factory method.
	 *
	 * @param model the process model whose constraints are turned into automata
	 * @return the automaton built out of the indexed regular expressions
	 * @deprecated superseded by the other heuristics in this class
	 */
	@Deprecated
	public static Automaton buildAutomatonByDimensionalityHeuristic(ProcessModel model) {
		TreeMap<Character, Collection<String>> regExpsMap = new TreeMap<Character, Collection<String>>();
		// FIXME This is just for testing purposes!!
		/*
		CharacterRelatedConstraintsBag impliedIndexedBag = ConstraintsIndexFactory.indexByImpliedTaskChar(bag);
		for (Constraint con : bag.getConstraintsOf(new TaskChar('a'))) {
			if (con.hasReasonablePositiveSupport(threshold) && con.isOfInterest(interest))
				regExps.add(con.getRegularExpression());
		}
		for (Constraint con : impliedIndexedBag.getConstraintsOf(new TaskChar('a'))) {
			if (con.hasReasonablePositiveSupport(threshold) && con.isOfInterest(interest))
				regExps.add(con.getRegularExpression());
		}
		*/
		for (TaskChar tChr : model.bag.getTaskChars()) {
			Collection<String> regExps = new ArrayList<String>();
			for (Constraint con : model.bag.getConstraintsOf(tChr)) {
				regExps.add(con.getRegularExpression());
			}
			regExpsMap.put(tChr.identifier, regExps);
		}
		return AutomatonFactory.fromRegularExpressionsByDimensionalityHeuristicInMultiThreading(regExpsMap, model.getTaskCharArchive().getIdentifiersAlphabet());
	}
}
| 7,752 | 38.758974 | 159 | java |
// Janus-master/src/trashbin/minerful/io/encdec/OldManualDeclareConstraintTransferObjectTranslator.java
package trashbin.minerful.io.encdec;
import minerful.concept.TaskCharArchive;
import minerful.concept.TaskCharSetFactory;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.existence.AtMostOne;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.relation.AlternatePrecedence;
import minerful.concept.constraint.relation.AlternateResponse;
import minerful.concept.constraint.relation.AlternateSuccession;
import minerful.concept.constraint.relation.ChainPrecedence;
import minerful.concept.constraint.relation.ChainResponse;
import minerful.concept.constraint.relation.ChainSuccession;
import minerful.concept.constraint.relation.CoExistence;
import minerful.concept.constraint.relation.NotChainSuccession;
import minerful.concept.constraint.relation.NotCoExistence;
import minerful.concept.constraint.relation.NotSuccession;
import minerful.concept.constraint.relation.Precedence;
import minerful.concept.constraint.relation.RespondedExistence;
import minerful.concept.constraint.relation.Response;
import minerful.concept.constraint.relation.Succession;
import minerful.io.encdec.DeclareConstraintTransferObject;
@Deprecated
public class OldManualDeclareConstraintTransferObjectTranslator {
private TaskCharArchive taskCharArchive;
private TaskCharSetFactory taskCharSetFactory;
public OldManualDeclareConstraintTransferObjectTranslator(TaskCharArchive taskCharArchive) {
this.taskCharArchive = taskCharArchive;
this.taskCharSetFactory = new TaskCharSetFactory(taskCharArchive);
}
public Constraint createConstraintOutOfTransferObject(DeclareConstraintTransferObject conTO) {
Constraint minerFulConstraint =
MetaConstraintUtils.makeConstraint(
conTO.minerFulTemplate,
this.taskCharSetFactory.createSetsFromTaskStringsCollection(
conTO.parameters
)
);
if (conTO.support != null) {
minerFulConstraint.setSupport(conTO.support);
}
if (conTO.confidence != null) {
minerFulConstraint.setConfidence(conTO.confidence);
}
if (conTO.interestFactor != null) {
minerFulConstraint.setInterestFactor(conTO.interestFactor);
}
return minerFulConstraint;
}
public Constraint createConstraintOutOfTransferObjectOld(DeclareConstraintTransferObject conTO) {
Constraint minerFulConstraint = null;
switch(conTO.declareMapTemplate) {
case Responded_Existence:
minerFulConstraint = new RespondedExistence(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Response:
minerFulConstraint = new Response(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Alternate_Response:
minerFulConstraint = new AlternateResponse(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Chain_Response:
minerFulConstraint = new ChainResponse(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Precedence:
minerFulConstraint = new Precedence(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Alternate_Precedence:
minerFulConstraint = new AlternatePrecedence(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Chain_Precedence:
minerFulConstraint = new ChainPrecedence(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case CoExistence:
minerFulConstraint = new CoExistence(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Succession:
minerFulConstraint = new Succession(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Alternate_Succession:
minerFulConstraint = new AlternateSuccession(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Chain_Succession:
minerFulConstraint = new ChainSuccession(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Not_Chain_Succession:
minerFulConstraint = new NotChainSuccession(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Not_Succession:
minerFulConstraint = new NotSuccession(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Not_CoExistence:
minerFulConstraint = new NotCoExistence(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0)),
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(1))
);
break;
case Absence2:
minerFulConstraint = new AtMostOne(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0))
);
break;
case Existence:
minerFulConstraint = new Participation(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0))
);
break;
case Init:
minerFulConstraint = new Init(
this.taskCharSetFactory.createSetFromTaskStrings(conTO.parameters.get(0))
);
break;
default:
return null;
}
if (conTO.support != null) {
minerFulConstraint.setSupport(conTO.support);
}
if (conTO.confidence != null) {
minerFulConstraint.setConfidence(conTO.confidence);
}
if (conTO.interestFactor != null) {
minerFulConstraint.setInterestFactor(conTO.interestFactor);
}
return minerFulConstraint;
}
} | 6,678 | 36.948864 | 98 | java |
Janus | Janus-master/src/trashbin/minerful/io/encdec/XesDecoder.java | package trashbin.minerful.io.encdec;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.deckfour.xes.extension.std.XConceptExtension;
import org.deckfour.xes.extension.std.XLifecycleExtension;
import org.deckfour.xes.in.XesXmlGZIPParser;
import org.deckfour.xes.in.XesXmlParser;
import org.deckfour.xes.model.XAttributeLiteral;
import org.deckfour.xes.model.XEvent;
import org.deckfour.xes.model.XLog;
import org.deckfour.xes.model.XTrace;
@Deprecated
public class XesDecoder {
// TODO It must become a user-specified parameter, not a constant
public static final boolean CONSIDER_EVENT_LIFECYCLE = false;
public static final String EVENT_TRANSITION_STATE_INFIX = "+";
private static Logger logger;
protected File xesFile;
protected XesXmlParser parser;
public XesDecoder(File xesFile) throws Exception {
if (logger == null) {
logger = Logger.getLogger(this.getClass().getCanonicalName());
}
parser = new XesXmlParser();
if (!parser.canParse(xesFile)) {
parser = new XesXmlGZIPParser();
if (!parser.canParse(xesFile)) {
throw new IllegalArgumentException("Unparsable log file: " + xesFile.getAbsolutePath());
}
}
this.xesFile = xesFile;
}
public static final String cleanEvtIdentifierTransitionStatus (String evtIdentifier) {
if (evtIdentifier.contains(EVENT_TRANSITION_STATE_INFIX))
return evtIdentifier.substring(0,evtIdentifier.lastIndexOf(EVENT_TRANSITION_STATE_INFIX));
return evtIdentifier;
}
public static final boolean matchesEvtIdentifierWithTransitionStatus (String yourEvtIdentifier, String comparedEvtIdentifier) {
return yourEvtIdentifier.matches(comparedEvtIdentifier + "(\\+.+)?");
}
public static final String glueActivityNameWithTransitionStatus (String evtIdentifier, String transitionStatus) {
if (!evtIdentifier.contains(EVENT_TRANSITION_STATE_INFIX)) {
return evtIdentifier + EVENT_TRANSITION_STATE_INFIX + transitionStatus;
}
logger.warn("The event identifer already contained the transition-status infix separator, " + EVENT_TRANSITION_STATE_INFIX);
return evtIdentifier;
}
public List<List<String>> decode() throws Exception {
StringBuffer debugSBuffer = new StringBuffer();
List<List<String>> outTraces = new ArrayList<List<String>>();
List<XTrace> traces = null;
List<XEvent> events = null;
List<String> outTrace = null;
String evtIdentifier = null;
List<XLog> xLogs = parser.parse(xesFile);
for (XLog xLog : xLogs) {
traces = xLog;
for (XTrace trace : traces) {
debugSBuffer.append("\n<");
events = trace;
outTrace = new ArrayList<String>(events.size());
for (XEvent event : events) {
if (event.getAttributes().get(XConceptExtension.KEY_NAME) != null) {
evtIdentifier = ((XAttributeLiteral)(event.getAttributes().get(XConceptExtension.KEY_NAME))).getValue();
// TODO It must become a user-specified parameter, not a constant
if (CONSIDER_EVENT_LIFECYCLE) {
if (event.getAttributes().get(XLifecycleExtension.KEY_TRANSITION) != null) {
evtIdentifier = evtIdentifier + EVENT_TRANSITION_STATE_INFIX + event.getAttributes().get(XLifecycleExtension.KEY_TRANSITION);
}
}
outTrace.add(evtIdentifier);
debugSBuffer.append(evtIdentifier);
debugSBuffer.append(", ");
}
}
outTraces.add(outTrace);
debugSBuffer.delete(debugSBuffer.length() -2, debugSBuffer.length());
debugSBuffer.append(">");
}
}
logger.trace(debugSBuffer.toString());
return outTraces;
}
} | 4,022 | 35.908257 | 145 | java |
Janus | Janus-master/src/trashbin/minerful/io/encdec/declaremap/OldConDecEncoder.java | package trashbin.minerful.io.encdec.declaremap;
import java.util.HashMap;
import minerful.concept.ProcessModel;
import minerful.concept.TaskChar;
import minerful.concept.constraint.Constraint;
import minerful.concept.constraint.MetaConstraintUtils;
import minerful.concept.constraint.existence.Init;
import minerful.concept.constraint.existence.Participation;
import minerful.concept.constraint.existence.AtMostOne;
import minerful.concept.constraint.relation.*;
import org.apache.commons.lang3.StringEscapeUtils;
@Deprecated
public class OldConDecEncoder {
	// The MINERful process model to serialize as a ConDec/Declare XML document.
	private ProcessModel process;

	// Maps MINERful constraint classes to the template names used in ConDec XML.
	// Sized to the number of discoverable existence + relation templates.
	public static HashMap<Class<? extends Constraint>, String> NAME_CONVERTER =
			new HashMap<Class<? extends Constraint>, String>(
					MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_EXISTENCE_CONSTRAINT_TEMPLATES + MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_RELATION_CONSTRAINT_TEMPLATES,
					1.0F);
	// Maps MINERful constraint classes to the LTL formulae embedded in the
	// <text> element of each ConDec template definition.
	public static HashMap<Class<? extends Constraint>, String> LTL_CONVERTER =
			new HashMap<Class<? extends Constraint>, String>(
					MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_EXISTENCE_CONSTRAINT_TEMPLATES + MetaConstraintUtils.NUMBER_OF_DISCOVERABLE_RELATION_CONSTRAINT_TEMPLATES,
					1.0F);

	static {
		// Existence
		// NAME_CONVERTER.put(End.class, "end");
		NAME_CONVERTER.put(Init.class, "init");
		NAME_CONVERTER.put(Participation.class, "existence");
		NAME_CONVERTER.put(AtMostOne.class, "absence2");
		// Relation
		NAME_CONVERTER.put(RespondedExistence.class, "responded existence");
		NAME_CONVERTER.put(Response.class, "response");
		NAME_CONVERTER.put(Precedence.class, "precedence");
		NAME_CONVERTER.put(AlternateResponse.class, "alternate response");
		NAME_CONVERTER.put(AlternatePrecedence.class, "alternate precedence");
		NAME_CONVERTER.put(ChainResponse.class, "chain response");
		NAME_CONVERTER.put(ChainPrecedence.class, "chain precedence");
		// Mutual relation
		NAME_CONVERTER.put(CoExistence.class, "coexistence");
		NAME_CONVERTER.put(Succession.class, "succession");
		NAME_CONVERTER.put(AlternateSuccession.class, "alternate succession");
		NAME_CONVERTER.put(ChainSuccession.class, "chain succession");
		// Negation relation
		NAME_CONVERTER.put(NotCoExistence.class, "not coexistence");
		NAME_CONVERTER.put(NotChainSuccession.class, "not chain succession");
		NAME_CONVERTER.put(NotSuccession.class, "not succession");

		// LTL formulae, written in the syntax expected by the Declare tooling
		// ("A"/"B" are the template's formal parameters).
		// LTL_CONVERTER.put(End.class, "end");
		LTL_CONVERTER.put(Init.class, "( ( \"A.started\" \\/ \"A.cancelled\" ) W \"A\" )");
		LTL_CONVERTER.put(Participation.class, "<> ( \"A\" )");
		LTL_CONVERTER.put(AtMostOne.class, "! ( <> ( ( \"A\" /\\ X(<>(\"A\")) ) ) )");
		// Relation
		LTL_CONVERTER.put(RespondedExistence.class, "( <>(\"A\") -> <>( \"B\" ) )");
		LTL_CONVERTER.put(Response.class, "[]( ( \"A\" -> <>( \"B\" ) ) )");
		LTL_CONVERTER.put(Precedence.class, "( ! (\"B\" ) U \"A\" ) \\/ ([](!(\"B\"))) /\\ ! (\"B\" )");
		LTL_CONVERTER.put(AlternateResponse.class, "[]( ( \"A\" -> X(( !(\"A\") U \"B\" ))))");
		// NOTE(review): the formula below appears to have one more closing than
		// opening parenthesis in its second conjunct — verify against the
		// Declare alternate-precedence template before relying on it.
		LTL_CONVERTER.put(AlternatePrecedence.class, "( ( !( \"B\" ) U \"A\" ) ) /\\ ( ( \"B\" -> X( ( !( \"B\" ) U \"A\" ) ) ) ) ) /\\ ! (\"B\" )");
		LTL_CONVERTER.put(ChainResponse.class, "[] ( ( \"A\" -> X( \"B\" ) ) )");
		LTL_CONVERTER.put(ChainPrecedence.class, "[]( ( X( \"B\" ) -> \"A\") )/\\ ! (\"B\" )");
		// Mutual relation
		LTL_CONVERTER.put(CoExistence.class, "( ( <>(\"A\") -> <>( \"B\" ) ) /\\ ( <>(\"B\") -> <>( \"A\" ) ) )");
		LTL_CONVERTER.put(Succession.class, "( []( ( \"A\" -> <>( \"B\" ) ) ) /\\ (( ( !(\"B\" ) U \"A\" )) \\/ ([](!(\"B\")))) )");
		LTL_CONVERTER.put(AlternateSuccession.class, "( []( ( \"A\" -> X(( ! ( \"A\" ) U \"B\" ) )) ) /\\ ( ( (! ( \"B\" ) U \"A\") \\/ ([](!(\"B\"))) ) /\\ [] ( ( \"B\" -> X( ( ( ! ( \"B\" ) U \"A\" )\\/([](!(\"B\"))) )) ) ) ) )");
		LTL_CONVERTER.put(ChainSuccession.class, "[]( ( \"A\" = X( \"B\" ) ) )");
		// Negation relation
		// NOTE(review): unlike every other formula here, A and B below are not
		// wrapped in escaped quotes — confirm the Declare parser accepts this.
		LTL_CONVERTER.put(NotCoExistence.class, "(<>A) -> (!(<>B))");
		LTL_CONVERTER.put(NotChainSuccession.class, "[]( ( \"A\" -> X( !( \"B\" ) ) ) )");
		LTL_CONVERTER.put(NotSuccession.class, "[]( ( \"A\" -> !( <>( \"B\" ) ) ) )");
	}

	// --- XML building blocks for the ConDec document, assembled in encode(). ---
	// Opens the <model>/<assignment> document; %s = process name.
	public static String PROCESS_DEF_BEGIN_TEMPLATE =
			"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>" + "\n" +
			"<model>" + "\n" +
			"	<assignment language=\"ConDec\" name=\"%s\">" + "\n";
	public static String ACTIVITY_DEF_GROUP_BEGIN_STRING =
			"		<activitydefinitions>" + "\n";
	// One activity definition; %1$s = activity id, %2$s = activity name.
	public static String ACTIVITY_DEF_TEMPLATE =
			"			<activity id=\"%1$s\" name=\"%2$s\" />" + "\n";
	public static String ACTIVITY_DEF_GROUP_END_STRING =
			"		</activitydefinitions>" + "\n";
	public static String CONSTRAINTS_GROUP_BEGIN_STRING =
			"		<constraintdefinitions>" + "\n";
	// Opens one constraint; %1$d = numeric constraint id, %2$s = template name.
	public static String CONSTRAINT_DEF_BEGIN_TEMPLATE =
			"			<constraint id=\"%1$d\" mandatory=\"true\">" + "\n" +
			"				<condition />" + "\n" +
			"				<name>%2$s</name>" + "\n";
	// Template metadata; %1$s = template name, %2$s = its LTL formula.
	public static String TEMPLATE_DEF_BEGIN_TEMPLATE =
			"				<template>" + "\n" +
			"					<description>%1$s constraint</description>" + "\n" +
			"					<display>%1$s</display>" + "\n" +
			"					<name>%1$s</name>" + "\n" +
			"					<text>%2$s</text>" + "\n";
	public static String PARAMETERS_GROUP_BEGIN_STRING =
			"					<parameters>" + "\n";
	// Formal parameter of a template; %1$d = id, %2$s = name, %3$s = branchable.
	public static String CONSTRAINT_PARAMETER_DEF_TEMPLATE =
			"						<parameter branchable=\"%3$s\" id=\"%1$d\" name=\"%2$s\">" + "\n" +
			// TODO The graphical part consists in dirty cheating
			"							<graphical>" + "\n" +
			"								<style number=\"1\" />" + "\n" +
			"								<begin fill=\"true\" style=\"10\" />" + "\n" +
			"								<middle fill=\"false\" style=\"0\" />" + "\n" +
			"								<end fill=\"false\" style=\"0\" />" + "\n" +
			"							</graphical>" + "\n" +
			"						</parameter>" + "\n";
	public static String PARAMETERS_GROUP_END_STRING =
			"					</parameters>" + "\n";
	// Fixed placeholder state messages required by the ConDec schema.
	public static String STATE_MESSAGES_DEF_STRING =
			"					<statemessages>" + "\n" +
			"						<message state=\"VIOLATED_TEMPORARY\">VIOLATED_TEMPORARY undefined</message>" + "\n" +
			"						<message state=\"SATISFIED\">SATISFIED undefined</message>" + "\n" +
			"						<message state=\"VIOLATED\">VIOLATED undefined</message>" + "\n" +
			"					</statemessages>" + "\n";
	public static String TEMPLATE_DEF_END_STRING =
			"				</template>" + "\n";
	public static String ACTUAL_PARAMETERS_GROUP_BEGIN_STRING =
			"				<constraintparameters>" + "\n";
	// Actual parameter binding; %1$d = formal parameter id, %2$s = activity name.
	public static String ACTUAL_PARAMETER_DEF_TEMPLATE =
			"					<parameter templateparameter=\"%1$d\">" + "\n" +
			"						<branches>" + "\n" +
			"							<branch name=\"%2$s\" />" + "\n" +
			"						</branches>" + "\n" +
			"					</parameter>" + "\n";
	public static String ACTUAL_PARAMETERS_GROUP_END_STRING =
			"				</constraintparameters>" + "\n";
	public static String CONSTRAINT_DEF_END_STRING =
			"			</constraint>" + "\n";
	public static String CONSTRAINTS_GROUP_END_STRING =
			"		</constraintdefinitions>" + "\n";
	// TODO The graphical part consists in dirty cheating
	public static String GRAPHICAL_DETAILS_BEGIN_STRING =
			"		<graphical>" + "\n";
	public static String ACTIVITIES_GRAPHICAL_DETAILS_GROUP_BEGIN_STRING =
			"			<activities>" + "\n";
	// Hard-coded cell geometry; %1$s = activity id.
	public static String ACTIVITY_GRAPHICAL_DETAILS_TEMPLATE =
			"				<cell height=\"30.0\" id=\"%1$s\" width=\"95.0\" x=\"610.3032983966159\" y=\"68.04626585205398\" />" + "\n";
	public static String ACTIVITIES_GRAPHICAL_DETAILS_GROUP_END_STRING =
			"			</activities>" + "\n";
	public static String CONSTRAINTS_GRAPHICAL_DETAILS_GROUP_BEGIN_STRING =
			"			<constraints>" + "\n";
	// Hard-coded cell geometry; %1$d = constraint id.
	public static String CONSTRAINT_GRAPHICAL_DETAILS_TEMPLATE =
			"				<cell height=\"1.0\" id=\"%1$d\" width=\"1.0\" x=\"705.9282992379913\" y=\"213.1872845786157\" />" + "\n";
	public static String CONSTRAINTS_GRAPHICAL_DETAILS_GROUP_END_STRING =
			"			</constraints>" + "\n";
	public static String GRAPHICAL_DETAILS_END_STRING =
			"		</graphical>" + "\n";
	public static String PROCESS_DEF_END_STRING =
			"	</assignment>" + "\n" +
			"</model>" + "\n";

	// Declare distinguishes lifecycle events; every task is exported as its
	// "-complete" event.
	public static String COMPLETE_EVENT_SUFFIX = "-complete";

	/**
	 * @param process the MINERful process model to encode as ConDec XML
	 */
	public OldConDecEncoder(ProcessModel process) {
		this.process = process;
	}

	// XML-escapes the task name and appends the "-complete" lifecycle suffix.
	private String makeItACompleteEvent(TaskChar tCh) {
		return StringEscapeUtils.escapeXml(tCh.getName()) + COMPLETE_EVENT_SUFFIX;
	}

	/**
	 * Serializes the process model as a ConDec/Declare XML document: activity
	 * definitions first, then one constraint definition per constraint whose
	 * class has a mapping in {@link #NAME_CONVERTER} (unmapped constraints are
	 * silently skipped), and finally the graphical section with dummy geometry.
	 *
	 * @return the ConDec XML document as a string
	 */
	public String encode() {
		StringBuilder sBuil = new StringBuilder();

		sBuil.append(String.format(PROCESS_DEF_BEGIN_TEMPLATE, this.process.getName()));
		sBuil.append(ACTIVITY_DEF_GROUP_BEGIN_STRING);
		for (TaskChar tCh : this.process.bag.getTaskChars()) {
			sBuil.append(String.format(ACTIVITY_DEF_TEMPLATE, StringEscapeUtils.escapeXml(tCh.identifier.toString()), this.makeItACompleteEvent(tCh)));
		}
		sBuil.append(ACTIVITY_DEF_GROUP_END_STRING);

		sBuil.append(CONSTRAINTS_GROUP_BEGIN_STRING);
		// i = running constraint id across the whole document;
		// j = formal-parameter id, k = actual-parameter id (both reset per constraint).
		int i = 1, j = 1, k = 1;
		for (TaskChar tCh : this.process.bag.getTaskChars()) {
			for (Constraint c : this.process.bag.getConstraintsOf(tCh)) {
				j = 1;
				k = 1;
				// Formal parameter names: 'A', then 'B' for relation constraints.
				char ci = 'A';
				String declareConstraintName = (
						NAME_CONVERTER.containsKey(c.getClass())
						?	StringEscapeUtils.escapeXml(NAME_CONVERTER.get(c.getClass()))
						:	null);
				if (declareConstraintName != null) {
					sBuil.append(String.format(CONSTRAINT_DEF_BEGIN_TEMPLATE, i++, declareConstraintName));
					sBuil.append(String.format(TEMPLATE_DEF_BEGIN_TEMPLATE, declareConstraintName, StringEscapeUtils.escapeXml(LTL_CONVERTER.get(c.getClass()))));
					sBuil.append(PARAMETERS_GROUP_BEGIN_STRING);
					// Only relation constraints have a branchable first parameter here.
					sBuil.append(String.format(CONSTRAINT_PARAMETER_DEF_TEMPLATE, j++, ci++,
							((c instanceof RelationConstraint) ? "true" : "false")
					));
					if (c instanceof RelationConstraint) {
						// TODO This trick is not going to work with branching Declare constraints!
						sBuil.append(String.format(CONSTRAINT_PARAMETER_DEF_TEMPLATE, j++, ci, "true"));
					}
					sBuil.append(PARAMETERS_GROUP_END_STRING);
					sBuil.append(STATE_MESSAGES_DEF_STRING);
					sBuil.append(TEMPLATE_DEF_END_STRING);
					sBuil.append(ACTUAL_PARAMETERS_GROUP_BEGIN_STRING);
					// Bind the base task (and, for relation constraints, the implied task).
					sBuil.append(String.format(ACTUAL_PARAMETER_DEF_TEMPLATE, k++, this.makeItACompleteEvent(c.getBase().getTaskCharsArray()[0])));
					if (c instanceof RelationConstraint) {
						sBuil.append(String.format(ACTUAL_PARAMETER_DEF_TEMPLATE, k++, this.makeItACompleteEvent(((RelationConstraint) c).getImplied().getTaskCharsArray()[0])));
					}
					sBuil.append(ACTUAL_PARAMETERS_GROUP_END_STRING);
					sBuil.append(CONSTRAINT_DEF_END_STRING);
				}
			}
		}
		sBuil.append(CONSTRAINTS_GROUP_END_STRING);

		sBuil.append(GRAPHICAL_DETAILS_BEGIN_STRING);
		sBuil.append(ACTIVITIES_GRAPHICAL_DETAILS_GROUP_BEGIN_STRING);
		for (TaskChar tCh : this.process.bag.getTaskChars()) {
			sBuil.append(String.format(ACTIVITY_GRAPHICAL_DETAILS_TEMPLATE, StringEscapeUtils.escapeXml(tCh.identifier.toString())));
		}
		sBuil.append(ACTIVITIES_GRAPHICAL_DETAILS_GROUP_END_STRING);
		sBuil.append(CONSTRAINTS_GRAPHICAL_DETAILS_GROUP_BEGIN_STRING);
		// Second pass over the same constraints in the same order, so the
		// graphical cell ids line up with the constraint ids assigned above.
		i = 1;
		for (TaskChar tCh : this.process.bag.getTaskChars()) {
			for (Constraint c : this.process.bag.getConstraintsOf(tCh)) {
				if (NAME_CONVERTER.containsKey(c.getClass())) {
					sBuil.append(String.format(CONSTRAINT_GRAPHICAL_DETAILS_TEMPLATE, i++));
				}
			}
		}
		sBuil.append(CONSTRAINTS_GRAPHICAL_DETAILS_GROUP_END_STRING);
		sBuil.append(GRAPHICAL_DETAILS_END_STRING);

		sBuil.append(PROCESS_DEF_END_STRING);
		return sBuil.toString();
	}
} | 11,626 | 47.045455 | 226 | java
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.