repo (string) | file (string) | code (string) | file_length (int64) | avg_line_length (float64) | max_line_length (int64) | extension_type (string)
---|---|---|---|---|---|---
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/dto/UserOrganismPermissionInfo.java |
package org.bbop.apollo.gwt.client.dto;
import com.google.gwt.json.client.JSONBoolean;
import com.google.gwt.json.client.JSONNumber;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
/**
* Created by ndunn on 3/24/15.
*/
public class UserOrganismPermissionInfo extends OrganismPermissionInfo{
Long userId ;
public Long getUserId() {
return userId;
}
public void setUserId(Long userId) {
this.userId = userId;
}
public JSONObject toJSON() {
JSONObject payload = new JSONObject();
payload.put("organism",new JSONString(organismName));
payload.put("ADMINISTRATE",JSONBoolean.getInstance(admin));
payload.put("WRITE",JSONBoolean.getInstance(write));
payload.put("EXPORT",JSONBoolean.getInstance(export));
payload.put("READ",JSONBoolean.getInstance(read));
if(userId!=null){
payload.put("userId",new JSONNumber(userId));
}
if(id!=null){
payload.put("id",new JSONNumber(id));
}
return payload;
}
}
| 1,098 | 27.179487 | 71 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/dto/VariantPropertyInfo.java |
package org.bbop.apollo.gwt.client.dto;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.json.client.JSONObject;
/**
* Created by deepak.unni3 on 9/16/16.
*/
public class VariantPropertyInfo {
private String tag;
private String value;
public VariantPropertyInfo() {
}
public VariantPropertyInfo(JSONObject variantPropertyInfoJsonObject) {
String tag = variantPropertyInfoJsonObject.get(FeatureStringEnum.TAG.getValue()).isString().stringValue();
this.tag = tag;
String value = null ;
if(variantPropertyInfoJsonObject.containsKey(FeatureStringEnum.VALUE.getValue())){
value = variantPropertyInfoJsonObject.get(FeatureStringEnum.VALUE.getValue()).isString().stringValue();
}
this.value = value;
}
public String getTag() { return this.tag; }
public void setTag(String tag) { this.tag = tag; }
public String getValue() { return this.value; }
public void setValue(String value) { this.value = value; }
public JSONObject convertToJsonObject() {
JSONObject variantPropertyJsonObject = new JSONObject();
variantPropertyJsonObject.put(FeatureStringEnum.TAG.getValue(), new JSONString(this.tag));
// value is optional in the source JSON, so guard against handing null to JSONString
if (this.value != null) {
variantPropertyJsonObject.put(FeatureStringEnum.VALUE.getValue(), new JSONString(this.value));
}
return variantPropertyJsonObject;
}
}
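As a usage sketch (not part of the Apollo source), the DTO round-trips cleanly between JSON and Java; the tag/value strings below are hypothetical.

```java
package org.bbop.apollo.gwt.client.dto;

import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;

// Hypothetical helper class, only for illustration.
class VariantPropertyInfoExample {
    static JSONObject roundTrip() {
        JSONObject json = new JSONObject();
        json.put(FeatureStringEnum.TAG.getValue(), new JSONString("dbSNP"));
        json.put(FeatureStringEnum.VALUE.getValue(), new JSONString("rs12345"));
        VariantPropertyInfo info = new VariantPropertyInfo(json); // reads the tag and the optional value
        return info.convertToJsonObject();                        // writes the same keys back out
    }
}
```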
| 1,446 | 31.155556 | 115 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/AnnotationInfoChangeEvent.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.GwtEvent;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
/**
* Created by ndunn on 2/2/15.
*/
public class AnnotationInfoChangeEvent extends GwtEvent<AnnotationInfoChangeEventHandler>{
public static Type<AnnotationInfoChangeEventHandler> TYPE = new Type<AnnotationInfoChangeEventHandler>();
private AnnotationInfo annotationInfo ;
private Action action ;
public enum Action{
UPDATE,
INSERT,
DELETE,
SET_FOCUS,
}
public AnnotationInfoChangeEvent(AnnotationInfo annotationInfo,Action action){
this.annotationInfo = annotationInfo ;
this.action = action ;
}
@Override
public Type<AnnotationInfoChangeEventHandler> getAssociatedType() {
return TYPE ;
}
@Override
protected void dispatch(AnnotationInfoChangeEventHandler handler) {
handler.onAnnotationChanged(this);
}
public AnnotationInfo getAnnotationInfo() {
return annotationInfo;
}
public void setAnnotationInfo(AnnotationInfo annotationInfo) {
this.annotationInfo = annotationInfo;
}
public Action getAction() {
return action;
}
public void setAction(Action action) {
this.action = action;
}
}
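A minimal wiring sketch (not from the Apollo source) showing how this GwtEvent subclass is typically registered and fired; the SimpleEventBus here stands in for whatever bus Apollo actually uses.

```java
package org.bbop.apollo.gwt.client.event;

import com.google.gwt.event.shared.SimpleEventBus;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;

// Hypothetical wiring, only for illustration.
class AnnotationInfoChangeEventExample {
    static void wire(AnnotationInfo annotationInfo) {
        SimpleEventBus eventBus = new SimpleEventBus();
        // handlers are registered against the static TYPE token
        eventBus.addHandler(AnnotationInfoChangeEvent.TYPE, new AnnotationInfoChangeEventHandler() {
            @Override
            public void onAnnotationChanged(AnnotationInfoChangeEvent event) {
                AnnotationInfoChangeEvent.Action action = event.getAction(); // UPDATE, INSERT, DELETE or SET_FOCUS
            }
        });
        // dispatch() then routes the event to every registered handler
        eventBus.fireEvent(new AnnotationInfoChangeEvent(annotationInfo, AnnotationInfoChangeEvent.Action.UPDATE));
    }
}
```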
| 1,323 | 23.518519 | 109 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/AnnotationInfoChangeEventHandler.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.EventHandler;
/**
* Created by ndunn on 1/19/15.
*/
public interface AnnotationInfoChangeEventHandler extends EventHandler{
void onAnnotationChanged(AnnotationInfoChangeEvent annotationInfoChangeEvent);
}
| 293 | 20 | 82 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/ExportEvent.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.GwtEvent;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.client.dto.SequenceInfo;
import java.util.List;
/**
* Created by ndunn on 1/19/15.
*/
public class ExportEvent extends GwtEvent<ExportEventHandler> {
public static Type<ExportEventHandler> TYPE = new Type<ExportEventHandler>();
public enum Action {
EXPORT_READY,
EXPORT_FINISHED,
}
public enum Flavor{
GFF3,
FASTA,
CHADO,
}
private Action thisAction;
private Flavor thisFlavor;
private OrganismInfo organismInfo ;
private List<SequenceInfo> sequenceInfoList ;
@Override
public Type<ExportEventHandler> getAssociatedType() {
return TYPE;
}
@Override
protected void dispatch(ExportEventHandler handler) {
handler.onExport(this);
}
public ExportEvent(Action action,Flavor flavor,OrganismInfo organismInfo,List<SequenceInfo> sequenceInfoList) {
this.thisAction = action;
this.thisFlavor = flavor ;
this.organismInfo = organismInfo ;
this.sequenceInfoList = sequenceInfoList ;
}
}
| 1,211 | 23.24 | 115 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/ExportEventHandler.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.EventHandler;
/**
* Created by ndunn on 1/19/15.
*/
public interface ExportEventHandler extends EventHandler{
void onExport(ExportEvent exportEvent);
}
| 240 | 16.214286 | 57 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/GroupChangeEvent.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.GwtEvent;
import org.bbop.apollo.gwt.client.dto.GroupInfo;
import java.util.List;
/**
* Created by ndunn on 1/19/15.
*/
public class GroupChangeEvent extends GwtEvent<GroupChangeEventHandler>{
public static Type<GroupChangeEventHandler> TYPE = new Type<GroupChangeEventHandler>();
private List<GroupInfo> groupInfoList;
private Action action ;
private String group ;
public GroupChangeEvent(){}
public GroupChangeEvent(List<GroupInfo> groupInfoList, Action action, String group){
this.groupInfoList = groupInfoList ;
this.action = action ;
this.group = group ;
}
public GroupChangeEvent(List<GroupInfo> groupInfoList, Action action){
this.groupInfoList = groupInfoList ;
this.action = action ;
}
public GroupChangeEvent(Action action) {
this.action = action;
}
public List<GroupInfo> getGroupInfoList() {
return groupInfoList;
}
public void setGroupInfoList(List<GroupInfo> groupInfoList) {
this.groupInfoList = groupInfoList;
}
@Override
public Type<GroupChangeEventHandler> getAssociatedType() {
return TYPE;
}
@Override
protected void dispatch(GroupChangeEventHandler handler) {
handler.onGroupChanged(this);
}
public Action getAction() {
return action;
}
public String getGroup() {
return group;
}
public enum Action{
ADD_USER_TO_GROUP,
REMOVE_USER_FROM_GROUP,
RELOAD_GROUPS,
ADD_GROUP,
REMOVE_GROUP,
GROUPS_RELOADED,
}
}
| 1,672 | 22.9 | 91 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/GroupChangeEventHandler.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.EventHandler;
/**
* Created by ndunn on 1/19/15.
*/
public interface GroupChangeEventHandler extends EventHandler{
void onGroupChanged(GroupChangeEvent groupChangeEvent);
}
| 263 | 19.307692 | 62 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/OrganismChangeEvent.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.GwtEvent;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import java.util.List;
/**
* Created by ndunn on 1/19/15.
*/
public class OrganismChangeEvent extends GwtEvent<OrganismChangeEventHandler>{
public static Type<OrganismChangeEventHandler> TYPE = new Type<OrganismChangeEventHandler>();
public List<OrganismInfo> organismInfoList;
private Action action;
private String currentSequence;
private String currentOrganism;
public OrganismChangeEvent(){}
public OrganismChangeEvent(List<OrganismInfo> organismInfoList){
this.organismInfoList = organismInfoList ;
}
public OrganismChangeEvent(Action action) {
this.action = action;
}
public OrganismChangeEvent(Action action, String sequenceNameString, String organismNameString) {
this.action = action ;
this.currentSequence = sequenceNameString ;
this.currentOrganism = organismNameString ;
}
public String getCurrentOrganism() {
return currentOrganism;
}
public List<OrganismInfo> getOrganismInfoList() {
return organismInfoList;
}
public void setOrganismInfoList(List<OrganismInfo> organismInfoList) {
this.organismInfoList = organismInfoList;
}
@Override
public Type<OrganismChangeEventHandler> getAssociatedType() {
return TYPE;
}
@Override
protected void dispatch(OrganismChangeEventHandler handler) {
handler.onOrganismChanged(this);
}
public void setAction(Action action) {
this.action = action;
}
public Action getAction() {
return action;
}
public String getCurrentSequence() {
return currentSequence;
}
public enum Action {
CHANGED_ORGANISM, LOADED_ORGANISMS
}
}
| 1,882 | 24.106667 | 109 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/OrganismChangeEventHandler.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.EventHandler;
/**
* Created by ndunn on 1/19/15.
*/
public interface OrganismChangeEventHandler extends EventHandler{
void onOrganismChanged(OrganismChangeEvent organismChangeEvent);
}
| 273 | 18.571429 | 68 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/UserChangeEvent.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.client.dto.UserInfo;
import org.bbop.apollo.gwt.shared.PermissionEnum;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by ndunn on 1/19/15.
*/
public class UserChangeEvent extends GwtEvent<UserChangeEventHandler>{
public static Type<UserChangeEventHandler> TYPE = new Type<UserChangeEventHandler>();
private List<UserInfo> userInfoList;
private Action action ;
private String group ;
private PermissionEnum highestPermission ;
public UserChangeEvent(Action action){
this.action = action ;
}
public UserChangeEvent(List<UserInfo> userInfoList,Action action,String group){
this.userInfoList = userInfoList ;
this.action = action ;
this.group = group ;
}
public UserChangeEvent(Action action,PermissionEnum highestPermission){
this.action = action ;
this.highestPermission = highestPermission ;
GWT.log(highestPermission.getDisplay());
}
public UserChangeEvent(List<UserInfo> userInfoList,Action action){
this.userInfoList = userInfoList ;
this.action = action ;
}
public List<UserInfo> getUserInfoList() {
return userInfoList;
}
public void setUserInfoList(List<UserInfo> userInfoList) {
this.userInfoList = userInfoList;
}
@Override
public Type<UserChangeEventHandler> getAssociatedType() {
return TYPE;
}
@Override
protected void dispatch(UserChangeEventHandler handler) {
handler.onUserChanged(this);
}
public Action getAction() {
return action;
}
public String getGroup() {
return group;
}
public PermissionEnum getHighestPermission() {
return highestPermission;
}
public void setHighestPermission(PermissionEnum highestPermission) {
this.highestPermission = highestPermission;
}
public enum Action{
ADD_USER_TO_GROUP,
REMOVE_USER_FROM_GROUP,
RELOAD_USERS,
PERMISSION_CHANGED,
USERS_RELOADED,
}
}
| 2,360 | 25.233333 | 89 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/event/UserChangeEventHandler.java |
package org.bbop.apollo.gwt.client.event;
import com.google.gwt.event.shared.EventHandler;
/**
* Created by ndunn on 1/19/15.
*/
public interface UserChangeEventHandler extends EventHandler{
void onUserChanged(UserChangeEvent userChangeEvent);
}
| 260 | 19.076923 | 61 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/go/GoEvidenceCode.java |
package org.bbop.apollo.gwt.client.go;
/**
* Matching order and descriptions here: http://geneontology.org/docs/guide-go-evidence-codes/
*/
public enum GoEvidenceCode {
EXP("experimental evidence used in manual assertion","ECO:0000269"),
IDA("direct assay evidence used in manual assertion","ECO:0000314"),
IPI("physical interaction evidence used in manual assertion","ECO:0000353"),
IMP("mutant phenotype evidence used in manual assertion","ECO:0000315"),
IGI("genetic interaction evidence used in manual assertion","ECO:0000316"),
IEP("inferred from expression pattern","ECO:0000270"),
HTP("inferred from high throughput experiment","ECO:0000270"),
HDA("inferred from high throughput direct assay","ECO:0007005"),
HMP("inferred from high throughput mutant phenotype","ECO:0007001"),
HGI("inferred from high throughput genetic interaction","ECO:0007003"),
HEP("inferred from high throughput expression pattern","ECO:0007007"),
IBA("inferred from biological aspect of ancestor","ECO:0000318"),
IBD("inferred from biological aspect of descendants","ECO:0000319"),
IKR("inferred from key residues","ECO:0000320"),
IRD("inferred from rapid divergence","ECO:0000321"),
ISS("sequence similarity evidence used in manual assertion","ECO:0000250"),
ISO("sequence orthology evidence used in manual assertion","ECO:0000266"),
ISA("sequence alignment evidence used in manual assertion","ECO:0000247"),
ISM("inferred from sequence model","ECO:0000255"),
IGC("inferred from genome context","ECO:0000317"),
RCA ("inferred from reviewed computational analysis","ECO:0000245"),
TAS("traceable author statement","ECO:0000304"),
NAS("non-traceable author statement","ECO:0000303"),
IC("inferred by curator","ECO:0000305"),
ND("no biological data available","ECO:0000307"),
IEA("evidence used in automatic assertion","ECO:0000501"),
;
GoEvidenceCode(String description, String curie) {
this.description = description;
this.curie = curie;
}
private String description;
private String curie;
public static boolean requiresWith(String evidenceCode) {
if(evidenceCode.equals(IPI.curie)) return true;
if(evidenceCode.equals(ISS.curie)) return true;
if(evidenceCode.equals(ISO.curie)) return true;
if(evidenceCode.equals(IGI.curie)) return true;
if(evidenceCode.equals(ISA.curie)) return true;
if(evidenceCode.equals(IBA.curie)) return true;
if(evidenceCode.equals(IC.curie)) return true;
return false;
}
public String getDescription() {
return description;
}
public String getCurie() {
return curie;
}
}
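A small illustrative check (not part of the Apollo source): requiresWith() compares against the ECO curie rather than the enum name, so callers pass getCurie().

```java
package org.bbop.apollo.gwt.client.go;

// Hypothetical snippet, only for illustration.
class GoEvidenceCodeExample {
    static void demo() {
        String issCurie = GoEvidenceCode.ISS.getCurie();                 // "ECO:0000250"
        boolean issNeedsWith = GoEvidenceCode.requiresWith(issCurie);    // true: ISS annotations need a with/from entry
        boolean idaNeedsWith = GoEvidenceCode.requiresWith(GoEvidenceCode.IDA.getCurie()); // false
    }
}
```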
| 2,747 | 35.157895 | 95 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/go/GoService.java |
package org.bbop.apollo.gwt.client.go;
public class GoService {
}
| 69 | 9 | 38 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/oracles/BiolinkLookup.java |
package org.bbop.apollo.gwt.client.oracles;
public enum BiolinkLookup {
GO,
ECO,
RO
}
| 100 | 10.222222 | 43 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/oracles/BiolinkOntologyOracle.java |
package org.bbop.apollo.gwt.client.oracles;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.RequestException;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.user.client.ui.MultiWordSuggestOracle;
import org.bbop.apollo.gwt.client.go.GoEvidenceCode;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Created by ndunn on 4/24/15.
*/
public class BiolinkOntologyOracle extends MultiWordSuggestOracle {
public final static String BIOLINK_AUTOCOMPLETE_URL = "https://api.geneontology.org/api/search/entity/autocomplete/";
public final static String ECO_BASE = "http://www.evidenceontology.org/term/";
public final static String GO_BASE = "http://amigo.geneontology.org/amigo/term/";
public final static String RO_BASE = "http://www.ontobee.org/ontology/RO?iri=http://purl.obolibrary.org/obo/";
private final String prefix;
private final String baseUrl;
private String category = null;
private Boolean useAllEco = false;
public BiolinkOntologyOracle() {
this(BiolinkLookup.ECO, ECO_BASE);
}
public BiolinkOntologyOracle(BiolinkLookup biolinkLookup) {
this(biolinkLookup, null);
}
public BiolinkOntologyOracle(BiolinkLookup biolinkLookup, String baseUrl) {
super();
this.prefix = biolinkLookup.name();
if (baseUrl != null) {
this.baseUrl = baseUrl;
} else {
switch (biolinkLookup) {
case ECO:
this.baseUrl = ECO_BASE;
break;
case GO:
this.baseUrl = GO_BASE;
break;
case RO:
this.baseUrl = RO_BASE;
break;
default:
this.baseUrl = null;
}
}
}
private void requestRemoteData(final Request suggestRequest, final Callback suggestCallback) {
Integer ROWS = 40;
String url = BIOLINK_AUTOCOMPLETE_URL + suggestRequest.getQuery() + "?rows=" + ROWS;
if (prefix != null) {
url += "&prefix=" + prefix;
}
if (category != null) {
url += "&category=" + category;
}
RequestBuilder rb = new RequestBuilder(RequestBuilder.GET, url);
try {
rb.sendRequest(null, new RequestCallback() {
@Override
public void onResponseReceived(com.google.gwt.http.client.Request request, com.google.gwt.http.client.Response response) {
JSONArray jsonArray = JSONParser.parseStrict(response.getText()).isObject().get("docs").isArray();
List<Suggestion> suggestionList = new ArrayList<>();
Set<String> ids = new HashSet<>();
for (int i = 0; i < jsonArray.size(); i++) {
final JSONObject jsonObject = jsonArray.get(i).isObject();
final String id = jsonObject.get("id").isString().stringValue();
if (!ids.contains(id)) {
Suggestion suggestion = new Suggestion() {
@Override
public String getDisplayString() {
String displayString = jsonObject.get("label").isArray().get(0).isString().stringValue();
displayString = displayString.replaceAll(suggestRequest.getQuery(), "<b><em>" + suggestRequest.getQuery() + "</em></b>");
displayString += " (" + id + ") ";
return displayString;
}
@Override
public String getReplacementString() {
return id;
}
};
suggestionList.add(suggestion);
}
}
Response r = new Response();
r.setSuggestions(suggestionList);
suggestCallback.onSuggestionsReady(suggestRequest, r);
}
@Override
public void onError(com.google.gwt.http.client.Request request, Throwable exception) {
Bootbox.alert("Error: " + exception);
}
});
} catch (RequestException e) {
e.printStackTrace();
Bootbox.alert("Request exception via " + e);
}
}
private void requestDefaultGo(final Request suggestRequest, final Callback suggestCallback) {
List<Suggestion> suggestionList = new ArrayList<>();
String query = suggestRequest.getQuery().toLowerCase().trim();
for (final GoEvidenceCode goEvidenceCode : GoEvidenceCode.values()) {
if (goEvidenceCode.name().toLowerCase().contains(query)
|| goEvidenceCode.getCurie().toLowerCase().contains(query)
|| goEvidenceCode.getDescription().toLowerCase().contains(query)
|| query.length()==0
) {
Suggestion suggestion = new Suggestion() {
@Override
public String getDisplayString() {
String displayString = goEvidenceCode.name() + " (" + goEvidenceCode.getCurie() +"): " + goEvidenceCode.getDescription();
displayString = displayString.replaceAll(suggestRequest.getQuery(), "<em>" + suggestRequest.getQuery() + "</em>");
displayString = "<div style='font-weight:boldest;font-size:larger;'>" + displayString + "</div>";
return displayString;
}
@Override
public String getReplacementString() {
return goEvidenceCode.getCurie();
}
};
suggestionList.add(suggestion);
}
}
Response r = new Response();
r.setSuggestions(suggestionList);
suggestCallback.onSuggestionsReady(suggestRequest, r);
}
@Override
public void requestSuggestions(final Request suggestRequest, final Callback suggestCallback) {
if (!useAllEco && baseUrl.equals(ECO_BASE)) {
requestDefaultGo(suggestRequest, suggestCallback);
} else {
requestRemoteData(suggestRequest, suggestCallback);
}
}
public String getCategory() {
return category;
}
public void setCategory(String category) {
this.category = category;
}
public void setUseAllEco(Boolean useAllEco) {
this.useAllEco = useAllEco;
}
}
| 7,068 | 39.164773 | 157 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/oracles/BiolinkSuggestBox.java |
package org.bbop.apollo.gwt.client.oracles;
import org.gwtbootstrap3.client.ui.SuggestBox;
public class BiolinkSuggestBox extends SuggestBox {
public BiolinkSuggestBox(BiolinkOntologyOracle oracle) {
super(oracle);
}
@Override
public void showSuggestionList() {
if(getText().length()>=0){
super.showSuggestionList();
}
}
}
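A hypothetical wiring sketch (not from the Apollo source): a GO-backed suggest box queries the remote autocomplete endpoint, while the default ECO oracle serves the local GoEvidenceCode list until setUseAllEco(true) is called.

```java
package org.bbop.apollo.gwt.client.oracles;

// Hypothetical factory, only for illustration.
class BiolinkSuggestBoxExample {
    static BiolinkSuggestBox createGoTermBox() {
        // GO lookups go to BIOLINK_AUTOCOMPLETE_URL; the no-arg constructor (ECO) would
        // instead suggest from GoEvidenceCode unless setUseAllEco(true) is set.
        BiolinkOntologyOracle oracle = new BiolinkOntologyOracle(BiolinkLookup.GO);
        return new BiolinkSuggestBox(oracle);
    }
}
```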
| 384 | 19.263158 | 60 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/oracles/ReferenceSequenceOracle.java |
package org.bbop.apollo.gwt.client.oracles;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.RequestException;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.user.client.ui.MultiWordSuggestOracle;
import com.google.gwt.user.client.ui.SuggestOracle;
import org.bbop.apollo.gwt.client.Annotator;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ndunn on 4/24/15.
*/
public class ReferenceSequenceOracle extends MultiWordSuggestOracle{
private final String rootUrl = Annotator.getRootUrl() + "sequence/lookupSequenceByName/?q=";
@Override
public void requestSuggestions(final SuggestOracle.Request suggestRequest, final Callback suggestCallback) {
String url = rootUrl+ suggestRequest.getQuery();
url += "&clientToken="+ Annotator.getClientToken();
RequestBuilder rb = new RequestBuilder(RequestBuilder.GET, url);
// rb.setHeader("Content-type", "application/x-www-form-urlencoded");
try {
rb.sendRequest(null, new RequestCallback() {
@Override
public void onResponseReceived(com.google.gwt.http.client.Request request, com.google.gwt.http.client.Response response) {
GWT.log(response.getText());
JSONArray jsonArray = JSONParser.parseStrict(response.getText()).isArray();
List<Suggestion> suggestionList = new ArrayList<>();
for(int i = 0 ; i < jsonArray.size() ; i++){
final String value = jsonArray.get(i).isString().stringValue();
Suggestion suggestion = new Suggestion() {
@Override
public String getDisplayString() {
return value ;
}
@Override
public String getReplacementString() {
return value ;
}
};
suggestionList.add(suggestion);
}
SuggestOracle.Response r = new SuggestOracle.Response();
r.setSuggestions(suggestionList);
suggestCallback.onSuggestionsReady(suggestRequest,r);
}
@Override
public void onError(com.google.gwt.http.client.Request request, Throwable exception) {
Bootbox.alert("Error: "+exception);
}
});
} catch (RequestException e) {
e.printStackTrace();
Bootbox.alert("Request exception via " + e);
}
}
}
| 3,011 | 39.16 | 138 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/resources/TableResources.java |
package org.bbop.apollo.gwt.client.resources;
import com.google.gwt.resources.client.ClientBundle;
import org.gwtbootstrap3.client.ui.gwt.DataGrid;
/**
* Created by ndunn on 12/19/14.
*/
public class TableResources implements ClientBundle{
// ClientBundle for the DataGrid cell tables: layers the custom Table.css over the default DataGrid stylesheet
public interface TableCss extends DataGrid.Resources
{
@ClientBundle.Source({DataGrid.Style.DEFAULT_CSS,
"org/bbop/apollo/gwt/client/resources/Table.css"})
DataGridStyle dataGridStyle();
interface DataGridStyle extends DataGrid.Style {}
}
}
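A hypothetical wiring sketch (not from the Apollo source), assuming the gwtbootstrap3 DataGrid exposes the same (pageSize, Resources) constructor as the stock GWT DataGrid: GWT.create() on the nested TableCss interface yields a bundle whose Table.css rules are applied to the grid.

```java
package org.bbop.apollo.gwt.client.resources;

import com.google.gwt.core.client.GWT;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.gwtbootstrap3.client.ui.gwt.DataGrid;

// Hypothetical factory, only for illustration.
class TableResourcesExample {
    static DataGrid<AnnotationInfo> createStyledGrid() {
        DataGrid.Resources resources = GWT.create(TableResources.TableCss.class);
        return new DataGrid<>(25, resources);   // page size 25 is an arbitrary choice
    }
}
```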
| 565 | 28.789474 | 66 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/resources/TreeResources.java |
package org.bbop.apollo.gwt.client.resources;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.user.cellview.client.CellTree;
public interface TreeResources extends CellTree.Resources {
@ClientBundle.Source("org/bbop/apollo/gwt/client/resources/Tree.css")
public CellTree.Style cellTreeStyle();
}
| 333 | 32.4 | 73 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/AnnotationRestService.java |
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.json.client.*;
import org.bbop.apollo.gwt.client.Annotator;
import org.bbop.apollo.gwt.client.AnnotatorPanel;
import org.bbop.apollo.gwt.client.VariantDetailPanel;
import org.bbop.apollo.gwt.client.dto.*;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.bbop.apollo.gwt.shared.geneProduct.GeneProduct;
import org.bbop.apollo.gwt.shared.go.GoAnnotation;
import org.bbop.apollo.gwt.shared.provenance.Provenance;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import java.util.List;
import java.util.Set;
/**
* Created by ndunn on 1/28/15.
*/
public class AnnotationRestService extends RestService {
public static JSONObject convertAnnotationInfoToJSONObject(AnnotationInfo annotationInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put(FeatureStringEnum.NAME.getValue(), new JSONString(annotationInfo.getName()));
jsonObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(annotationInfo.getUniqueName()));
jsonObject.put(FeatureStringEnum.SYMBOL.getValue(), annotationInfo.getSymbol() != null ? new JSONString(annotationInfo.getSymbol()) : new JSONString(""));
jsonObject.put(FeatureStringEnum.STATUS.getValue(), annotationInfo.getStatus() != null ? new JSONString(annotationInfo.getStatus()) : null);
jsonObject.put(FeatureStringEnum.DESCRIPTION.getValue(), annotationInfo.getDescription() != null ? new JSONString(annotationInfo.getDescription()) : new JSONString(""));
jsonObject.put(FeatureStringEnum.TYPE.getValue(), new JSONString(annotationInfo.getType()));
jsonObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
jsonObject.put(FeatureStringEnum.SYNONYMS.getValue(), annotationInfo.getSynonyms() != null ? new JSONString(annotationInfo.getSynonyms()) : null );
if (VariantDetailPanel.variantTypes.contains(annotationInfo.getType())) {
if (annotationInfo.getReferenceAllele() != null)
jsonObject.put(FeatureStringEnum.REFERENCE_ALLELE.getValue(), new JSONString(annotationInfo.getReferenceAllele()));
if (annotationInfo.getAlternateAlleles() != null)
jsonObject.put(FeatureStringEnum.ALTERNATE_ALLELES.getValue(), annotationInfo.getAlternateAllelesAsJsonArray());
if (annotationInfo.getVariantProperties() != null)
jsonObject.put(FeatureStringEnum.VARIANT_INFO.getValue(), annotationInfo.getVariantPropertiesAsJsonArray());
}
jsonObject.put(FeatureStringEnum.FMIN.getValue(), annotationInfo.getMin() != null ? new JSONNumber(annotationInfo.getMin()) : null);
jsonObject.put(FeatureStringEnum.FMAX.getValue(), annotationInfo.getMax() != null ? new JSONNumber(annotationInfo.getMax()) : null);
jsonObject.put(FeatureStringEnum.IS_FMIN_PARTIAL.getValue(), JSONBoolean.getInstance(annotationInfo.getPartialMin()) );
jsonObject.put(FeatureStringEnum.IS_FMAX_PARTIAL.getValue(), JSONBoolean.getInstance(annotationInfo.getPartialMax()) );
jsonObject.put(FeatureStringEnum.OBSOLETE.getValue(), JSONBoolean.getInstance(annotationInfo.getObsolete()) );
jsonObject.put(FeatureStringEnum.STRAND.getValue(), annotationInfo.getStrand() != null ? new JSONNumber(annotationInfo.getStrand()) : null);
return jsonObject;
}
static JSONObject generateTypeObject(String type){
JSONObject featureTypeObject = new JSONObject();
JSONObject cvObject = new JSONObject();
cvObject.put(FeatureStringEnum.NAME.getValue(),new JSONString(FeatureStringEnum.SEQUENCE.getValue()));
featureTypeObject.put(FeatureStringEnum.CV.getValue(),cvObject);
featureTypeObject.put(FeatureStringEnum.NAME.getValue(),new JSONString(type));
return featureTypeObject;
}
static JSONObject generateLocationObject(AnnotationInfo annotationInfo){
JSONObject locationObject = new JSONObject();
locationObject.put(FeatureStringEnum.FMIN.getValue(), annotationInfo.getMin() != null ? new JSONNumber(annotationInfo.getMin()) : null);
locationObject.put(FeatureStringEnum.FMAX.getValue(), annotationInfo.getMax() != null ? new JSONNumber(annotationInfo.getMax()) : null);
locationObject.put(FeatureStringEnum.IS_FMIN_PARTIAL.getValue(), JSONBoolean.getInstance(annotationInfo.getPartialMin()) );
locationObject.put(FeatureStringEnum.IS_FMAX_PARTIAL.getValue(), JSONBoolean.getInstance(annotationInfo.getPartialMax()) );
locationObject.put(FeatureStringEnum.STRAND.getValue(), annotationInfo.getStrand() != null ? new JSONNumber(annotationInfo.getStrand()) : null);
return locationObject;
}
/**
* Creates a transcript with a matching exon.
* @param requestCallback callback invoked with the server response
* @param annotationInfo annotation supplying the location, type, and synonyms for the new transcript
*/
public static void createTranscriptWithExon(RequestCallback requestCallback, AnnotationInfo annotationInfo) {
JSONObject jsonObject = new JSONObject();
JSONArray featuresArray = new JSONArray();
jsonObject.put(FeatureStringEnum.SEQUENCE.getValue(),new JSONString(annotationInfo.getSequence()));
jsonObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
JSONObject featureObject = new JSONObject();
featuresArray.set(featuresArray.size(), featureObject);
// {\"track\":\"Group11.18\",\"features\":[{\"location\":{\"fmin\":3464814,\"fmax\":3464958,\"strand\":-1},\"type\":{\"cv\":{\"name\":\"sequence\"},\"name\":\"mRNA\"},\"name\":\"GB44961-RA\",\"orig_id\":\"GB44961-RA\",\"children\":[{\"location\":{\"fmin\":3464814,\"fmax\":3464958,\"strand\":-1},\"type\":{\"cv\":{\"name\":\"sequence\"},\"name\":\"exon\"}}]}],\"operation\":\"add_transcript\",\"clientToken\":\"66322431814575743501200095773\"}
JSONArray childrenArray = new JSONArray();
JSONObject childObject = new JSONObject();
childObject.put(FeatureStringEnum.LOCATION.getValue(),generateLocationObject(annotationInfo));
childObject.put(FeatureStringEnum.TYPE.getValue(),generateTypeObject("exon"));
childrenArray.set(0,childObject);
featureObject.put(FeatureStringEnum.CHILDREN.getValue(),childrenArray);
featureObject.put(FeatureStringEnum.LOCATION.getValue(),generateLocationObject(annotationInfo));
featureObject.put(FeatureStringEnum.TYPE.getValue(),generateTypeObject(annotationInfo.getType()));
featureObject.put(FeatureStringEnum.DESCRIPTION.getValue(),new JSONString("created with search hit") );
if(annotationInfo.getSynonyms()!=null){
featureObject.put(FeatureStringEnum.SYNONYMS.getValue(),new JSONString(annotationInfo.getSynonyms()) );
}
sendRequest(requestCallback, "annotationEditor/addTranscript", "data=" + jsonObject.toString());
}
public static JSONObject deleteAnnotations(RequestCallback requestCallback, Set<AnnotationInfo> annotationInfoSet) {
JSONObject jsonObject = new JSONObject();
JSONArray featuresArray = new JSONArray();
jsonObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
for (AnnotationInfo annotationInfo : annotationInfoSet) {
JSONObject uniqueNameObject = new JSONObject();
uniqueNameObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(annotationInfo.getUniqueName()));
featuresArray.set(featuresArray.size(), uniqueNameObject);
}
sendRequest(requestCallback, "annotationEditor/deleteFeature", "data=" + jsonObject.toString());
return jsonObject;
}
private static JSONObject generateSequenceObject(Set<SequenceInfo> sequenceInfoSet){
JSONObject jsonObject = new JSONObject();
JSONArray sequencesArray = new JSONArray();
jsonObject.put(FeatureStringEnum.SEQUENCE.getValue(), sequencesArray);
for (SequenceInfo sequenceInfo : sequenceInfoSet) {
JSONObject sequenceIdObject = new JSONObject();
sequenceIdObject.put(FeatureStringEnum.ID.getValue(), new JSONNumber(sequenceInfo.getId()));
sequencesArray.set(sequencesArray.size(), sequenceIdObject);
}
return jsonObject ;
}
public static JSONObject deleteAnnotationsFromSequences(RequestCallback requestCallback, Set<SequenceInfo> sequenceInfoSet) {
JSONObject jsonObject = generateSequenceObject(sequenceInfoSet);
sendRequest(requestCallback, "annotationEditor/deleteFeaturesForSequences", "data=" + jsonObject.toString());
return jsonObject;
}
public static void updateCommonPath(RequestCallback requestCallback, String directory) {
sendRequest(requestCallback, "annotator/updateCommonPath", "directory="+directory);
}
public static JSONObject deleteVariantAnnotationsFromSequences(RequestCallback requestCallback, Set<SequenceInfo> sequenceInfoSet) {
JSONObject jsonObject = generateSequenceObject(sequenceInfoSet);
sendRequest(requestCallback, "annotationEditor/deleteVariantEffectsForSequences", "data=" + jsonObject.toString());
return jsonObject;
}
public static void findAnnotationByUniqueName(RequestCallback requestCallback,String uniqueName){
String url = Annotator.getRootUrl() + "annotator/findAnnotationsForSequence/?searchUniqueName=true&annotationName="+uniqueName;
long requestIndex = AnnotatorPanel.getNextRequestIndex();
url += "&request="+requestIndex;
url += "&statusString=" ;
sendRequest(requestCallback, url);
}
public static JSONObject addFunctionalAnnotations(RequestCallback requestCallback, JSONObject jsonObject) {
RestService.sendRequest(requestCallback,"annotator/addFunctionalAnnotations","data="+jsonObject.toString());
return jsonObject;
}
}
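A hypothetical caller (not part of the Apollo source): every method above builds a JSON payload and posts it through RestService.sendRequest with a RequestCallback, so client code follows this shape.

```java
package org.bbop.apollo.gwt.client.rest;

import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;

import java.util.Set;

// Hypothetical caller, only for illustration.
class AnnotationRestServiceExample {
    static void deleteSelected(Set<AnnotationInfo> selectedAnnotations) {
        AnnotationRestService.deleteAnnotations(new RequestCallback() {
            @Override
            public void onResponseReceived(Request request, Response response) {
                // e.g. refresh the annotations table once the server confirms the delete
            }
            @Override
            public void onError(Request request, Throwable exception) {
                Bootbox.alert("Error deleting annotations: " + exception);
            }
        }, selectedAnnotations);
    }
}
```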
| 9,919 | 56.674419 | 456 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/AttributeRestService.java |
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
import org.bbop.apollo.gwt.client.dto.*;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
/**
* Created by ndunn on 1/14/15.
*/
public class AttributeRestService {
public static void updateAttribute(RequestCallback requestCallback, AnnotationInfo annotationInfo,AttributeInfo oldAttributeInfo,AttributeInfo newAttributeInfo) {
// 0: "SEND↵destination:/app/AnnotationNotification↵content-length:328↵↵"{\"track\":\"ctgA\",\"features\":[{\"uniquename\":\"fd57cc6a-8e29-4a48-9832-82c06bcc869c\",\"old_non_reserved_properties\":[{\"tag\":\"2222\",\"value\":\"3333\"}],\"new_non_reserved_properties\":[{\"tag\":\"777\",\"value\":\"3333\"}]}],\"operation\":\"update_non_reserved_properties\",\"clientToken\":\"18068643442091616983\"}""
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray oldAttributeJsonArray = new JSONArray();
JSONObject oldAttributeJsonObject = new JSONObject();
oldAttributeJsonObject.put(FeatureStringEnum.TAG.getValue(), new JSONString(oldAttributeInfo.getTag()));
oldAttributeJsonObject.put(FeatureStringEnum.VALUE.getValue(), new JSONString(oldAttributeInfo.getValue()));
oldAttributeJsonArray.set(0, oldAttributeJsonObject);
featureObject.put(FeatureStringEnum.OLD_NON_RESERVED_PROPERTIES.getValue(), oldAttributeJsonArray);
JSONArray newAttributeJsonArray = new JSONArray();
JSONObject newAttributeJsonObject = new JSONObject();
newAttributeJsonObject.put(FeatureStringEnum.TAG.getValue(), new JSONString(newAttributeInfo.getTag()));
newAttributeJsonObject.put(FeatureStringEnum.VALUE.getValue(), new JSONString(newAttributeInfo.getValue()));
newAttributeJsonArray.set(0, newAttributeJsonObject);
featureObject.put(FeatureStringEnum.NEW_NON_RESERVED_PROPERTIES.getValue(), newAttributeJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/updateAttribute", "data=" + requestObject.toString());
}
public static void addAttribute(RequestCallback requestCallback, AnnotationInfo annotationInfo, AttributeInfo attributeInfo) {
// {"track":"ctgA", "features":[{"uniquename":"fd57cc6a-8e29-4a48-9832-82c06bcc869c", "dbxrefs":[{"db":"asdf", "accession":"zzz"}]}]}
// 0: "SEND↵destination:/app/AnnotationNotification↵content-length:249↵↵"{\"track\":\"ctgA\",\"features\":[{\"uniquename\":\"fd57cc6a-8e29-4a48-9832-82c06bcc869c\",\"non_reserved_properties\":[{\"tag\":\"1111\",\"value\":\"222\"}]}],\"operation\":\"add_non_reserved_properties\",\"clientToken\":\"18068643442091616983\"}""
GWT.log("Adding attribute");
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray attributeJsonArray = new JSONArray();
JSONObject attributeJsonObject = new JSONObject();
attributeJsonObject.put(FeatureStringEnum.TAG.getValue(), new JSONString(attributeInfo.getTag()));
attributeJsonObject.put(FeatureStringEnum.VALUE.getValue(), new JSONString(attributeInfo.getValue()));
attributeJsonArray.set(0, attributeJsonObject);
featureObject.put(FeatureStringEnum.NON_RESERVED_PROPERTIES.getValue(), attributeJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/addAttribute", "data=" + requestObject.toString());
}
public static void deleteAttribute(RequestCallback requestCallback, AnnotationInfo annotationInfo, AttributeInfo attributeInfo) {
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray attributeJsonArray = new JSONArray();
JSONObject attributeJsonObject = new JSONObject();
attributeJsonObject.put(FeatureStringEnum.TAG.getValue(), new JSONString(attributeInfo.getTag()));
attributeJsonObject.put(FeatureStringEnum.VALUE.getValue(), new JSONString(attributeInfo.getValue()));
attributeJsonArray.set(0, attributeJsonObject);
featureObject.put(FeatureStringEnum.NON_RESERVED_PROPERTIES.getValue(), attributeJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/deleteAttribute", "data=" + requestObject.toString());
}
public static void getAttributes(RequestCallback requestCallback, AnnotationInfo annotationInfo, OrganismInfo organismInfo) {
JSONObject featureObject= new JSONObject();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(),new JSONString(annotationInfo.getUniqueName()));
featureObject.put(FeatureStringEnum.ORGANISM_ID.getValue(),new JSONString(organismInfo.getId()));
RestService.sendRequest(requestCallback, "annotationEditor/getAttributes", "data=" + featureObject.toString());
}
}
| 6,420 | 68.043011 | 408 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/AvailableStatusRestService.java |
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
import org.bbop.apollo.gwt.client.MainPanel;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.bbop.apollo.gwt.client.dto.StatusInfo;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
/**
* Created by ndunn on 1/14/15.
*/
public class AvailableStatusRestService {
public static void updateStatus(RequestCallback requestCallback, AnnotationInfo annotationInfo,StatusInfo oldStatusInfo,StatusInfo newStatusInfo) {
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray oldStatusJsonArray = new JSONArray();
oldStatusJsonArray.set(0, new JSONString(oldStatusInfo.getStatus()));
featureObject.put(FeatureStringEnum.OLD_COMMENTS.getValue(), oldStatusJsonArray);
JSONArray newStatusJsonArray = new JSONArray();
newStatusJsonArray.set(0, new JSONString(newStatusInfo.getStatus()));
featureObject.put(FeatureStringEnum.NEW_COMMENTS.getValue(), newStatusJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/updateStatuses", "data=" + requestObject.toString());
}
public static void addStatus(RequestCallback requestCallback, AnnotationInfo annotationInfo, StatusInfo commentInfo) {
// 0: "SEND↵destination:/app/AnnotationNotification↵content-length:310↵↵"{\"track\":\"ctgA\",\"features\":[{\"uniquename\":\"fd57cc6a-8e29-4a48-9832-82c06bcc869c\",\,\"operation\":\"update_non_primary_dbxrefs\",\"clientToken\":\"18068643442091616983\"}""
// "dbxrefs\":[{\"db\":\"aasd\",\"accession\":\"12312\"}],
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray commentsJsonArray = new JSONArray();
commentsJsonArray.set(0, new JSONString(commentInfo.getStatus()));
featureObject.put(FeatureStringEnum.COMMENTS.getValue(), commentsJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/addStatuses", "data=" + requestObject.toString());
}
public static void deleteStatus(RequestCallback requestCallback, AnnotationInfo annotationInfo, StatusInfo commentInfo) {
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray dbXrefJsonArray = new JSONArray();
JSONObject dbXrefJsonObject = new JSONObject();
dbXrefJsonArray.set(0, new JSONString(commentInfo.getStatus()));
featureObject.put(FeatureStringEnum.COMMENTS.getValue(), dbXrefJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/deleteStatuses", "data=" + requestObject.toString());
}
public static void getAvailableStatuses(RequestCallback requestCallback, AnnotationInfo internalAnnotationInfo) {
getAvailableStatuses(requestCallback,internalAnnotationInfo.getType(),MainPanel.getInstance().getCurrentOrganism().getId());
}
public static void getAvailableStatuses(RequestCallback requestCallback) {
getAvailableStatuses(requestCallback,null,MainPanel.getInstance().getCurrentOrganism().getId());
}
public static void getAvailableStatuses(RequestCallback requestCallback, String type,String organismId) {
JSONObject jsonObject = new JSONObject();
if(type!=null){
jsonObject.put(FeatureStringEnum.TYPE.getValue(),new JSONString(type));
}
jsonObject.put(FeatureStringEnum.ORGANISM_ID.getValue(), new JSONString(organismId));
RestService.sendRequest(requestCallback, "annotationEditor/getAvailableStatuses", "data=" +jsonObject.toString() );
}
}
| 5,211 | 56.274725 | 273 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/CommentRestService.java |
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
import org.bbop.apollo.gwt.client.MainPanel;
import org.bbop.apollo.gwt.client.dto.*;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
/**
* Created by ndunn on 1/14/15.
*/
public class CommentRestService {
public static void updateComment(RequestCallback requestCallback, AnnotationInfo annotationInfo,CommentInfo oldCommentInfo,CommentInfo newCommentInfo) {
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray oldCommentJsonArray = new JSONArray();
oldCommentJsonArray.set(0, new JSONString(oldCommentInfo.getComment()));
featureObject.put(FeatureStringEnum.OLD_COMMENTS.getValue(), oldCommentJsonArray);
JSONArray newCommentJsonArray = new JSONArray();
newCommentJsonArray.set(0, new JSONString(newCommentInfo.getComment()));
featureObject.put(FeatureStringEnum.NEW_COMMENTS.getValue(), newCommentJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/updateComments", "data=" + requestObject.toString());
}
public static void addComment(RequestCallback requestCallback, AnnotationInfo annotationInfo, CommentInfo commentInfo) {
// 0: "SEND↵destination:/app/AnnotationNotification↵content-length:310↵↵"{\"track\":\"ctgA\",\"features\":[{\"uniquename\":\"fd57cc6a-8e29-4a48-9832-82c06bcc869c\",\,\"operation\":\"update_non_primary_dbxrefs\",\"clientToken\":\"18068643442091616983\"}""
// "dbxrefs\":[{\"db\":\"aasd\",\"accession\":\"12312\"}],
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray commentsJsonArray = new JSONArray();
commentsJsonArray.set(0, new JSONString(commentInfo.getComment()));
featureObject.put(FeatureStringEnum.COMMENTS.getValue(), commentsJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/addComments", "data=" + requestObject.toString());
}
public static void deleteComment(RequestCallback requestCallback, AnnotationInfo annotationInfo, CommentInfo commentInfo) {
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray dbXrefJsonArray = new JSONArray();
JSONObject dbXrefJsonObject = new JSONObject();
dbXrefJsonArray.set(0, new JSONString(commentInfo.getComment()));
featureObject.put(FeatureStringEnum.COMMENTS.getValue(), dbXrefJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/deleteComments", "data=" + requestObject.toString());
}
public static void getComments(RequestCallback requestCallback, AnnotationInfo annotationInfo, OrganismInfo organismInfo) {
JSONObject dataObject = new JSONObject();
JSONArray featuresArray = new JSONArray();
dataObject.put(FeatureStringEnum.FEATURES.getValue(),featuresArray);
JSONObject featureObject= new JSONObject();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(),new JSONString(annotationInfo.getUniqueName()));
featureObject.put(FeatureStringEnum.ORGANISM_ID.getValue(),new JSONString(organismInfo.getId()));
featuresArray.set(0,featureObject);
RestService.sendRequest(requestCallback, "annotationEditor/getComments", "data=" + dataObject.toString());
}
public static void getCannedComments(RequestCallback requestCallback, AnnotationInfo internalAnnotationInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put(FeatureStringEnum.TYPE.getValue(),new JSONString(internalAnnotationInfo.getType()));
jsonObject.put(FeatureStringEnum.ORGANISM_ID.getValue(), new JSONString(MainPanel.getInstance().getCurrentOrganism().getId()));
RestService.sendRequest(requestCallback, "annotationEditor/getCannedComments", "data=" +jsonObject.toString() );
}
public static void getCannedKeys(RequestCallback requestCallback, AnnotationInfo internalAnnotationInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put(FeatureStringEnum.TYPE.getValue(),new JSONString(internalAnnotationInfo.getType()));
jsonObject.put(FeatureStringEnum.ORGANISM_ID.getValue(), new JSONString(MainPanel.getInstance().getCurrentOrganism().getId()));
RestService.sendRequest(requestCallback, "annotationEditor/getCannedKeys", "data=" +jsonObject.toString() );
}
public static void getCannedValues(RequestCallback requestCallback, AnnotationInfo internalAnnotationInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put(FeatureStringEnum.TYPE.getValue(),new JSONString(internalAnnotationInfo.getType()));
jsonObject.put(FeatureStringEnum.ORGANISM_ID.getValue(), new JSONString(MainPanel.getInstance().getCurrentOrganism().getId()));
RestService.sendRequest(requestCallback, "annotationEditor/getCannedValues", "data=" +jsonObject.toString() );
}
}
| 6,591 | 61.780952 | 273 | java |
Apollo | Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/DbXrefRestService.java |
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.user.client.ui.Anchor;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.bbop.apollo.gwt.client.dto.DbXRefInfoConverter;
import org.bbop.apollo.gwt.client.dto.DbXrefInfo;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
/**
* Created by ndunn on 1/14/15.
*/
public class DbXrefRestService {
static String TERM_LOOKUP_SERVER = "http://api.geneontology.org/api/ontology/term/"; // ECO%3A0000315
public static void saveDbXref(RequestCallback requestCallback, DbXrefInfo dbXrefInfo) {
RestService.sendRequest(requestCallback, "dbXrefInfo/save", "data=" + DbXRefInfoConverter.convertToJson(dbXrefInfo).toString());
}
public static void updateDbXref(RequestCallback requestCallback, AnnotationInfo annotationInfo,DbXrefInfo oldDbXrefInfo,DbXrefInfo newDbXrefInfo) {
// 0: "SEND↵destination:/app/AnnotationNotification↵content-length:310↵↵"{\"track\":\"ctgA\",\"features\":[{\"uniquename\":\"fd57cc6a-8e29-4a48-9832-82c06bcc869c\",\,\"operation\":\"update_non_primary_dbxrefs\",\"clientToken\":\"18068643442091616983\"}""
// "old_dbxrefs\":[{\"db\":\"aasd\",\"accession\":\"12312\"}],
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray oldDbXrefJsonArray = new JSONArray();
JSONObject oldDbXrefJsonObject = new JSONObject();
oldDbXrefJsonObject.put(FeatureStringEnum.DB.getValue(), new JSONString(oldDbXrefInfo.getTag()));
oldDbXrefJsonObject.put(FeatureStringEnum.ACCESSION.getValue(), new JSONString(oldDbXrefInfo.getValue()));
oldDbXrefJsonArray.set(0, oldDbXrefJsonObject);
featureObject.put(FeatureStringEnum.OLD_DBXREFS.getValue(), oldDbXrefJsonArray);
//\"new_dbxrefs\":[{\"db\":\"asdfasdfaaeee\",\"accession\":\"12312\"}]}]
JSONArray newDbXrefJsonArray = new JSONArray();
JSONObject newDbXrefJsonObject = new JSONObject();
newDbXrefJsonObject.put(FeatureStringEnum.DB.getValue(), new JSONString(newDbXrefInfo.getTag()));
newDbXrefJsonObject.put(FeatureStringEnum.ACCESSION.getValue(), new JSONString(newDbXrefInfo.getValue()));
newDbXrefJsonArray.set(0, newDbXrefJsonObject);
featureObject.put(FeatureStringEnum.NEW_DBXREFS.getValue(), newDbXrefJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/updateDbxref", "data=" + requestObject.toString());
}
public static void addDbXref(RequestCallback requestCallback, AnnotationInfo annotationInfo, DbXrefInfo dbXrefInfo) {
// 0: "SEND↵destination:/app/AnnotationNotification↵content-length:310↵↵"{\"track\":\"ctgA\",\"features\":[{\"uniquename\":\"fd57cc6a-8e29-4a48-9832-82c06bcc869c\",\,\"operation\":\"update_non_primary_dbxrefs\",\"clientToken\":\"18068643442091616983\"}""
// "dbxrefs\":[{\"db\":\"aasd\",\"accession\":\"12312\"}],
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray dbXrefJsonArray = new JSONArray();
JSONObject dbXrefJsonObject = new JSONObject();
dbXrefJsonObject.put(FeatureStringEnum.DB.getValue(), new JSONString(dbXrefInfo.getTag()));
dbXrefJsonObject.put(FeatureStringEnum.ACCESSION.getValue(), new JSONString(dbXrefInfo.getValue()));
dbXrefJsonArray.set(0, dbXrefJsonObject);
featureObject.put(FeatureStringEnum.DBXREFS.getValue(), dbXrefJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/addDbxref", "data=" + requestObject.toString());
}
public static void deleteDbXref(RequestCallback requestCallback, AnnotationInfo annotationInfo, DbXrefInfo dbXrefInfo) {
JSONArray featuresArray = new JSONArray();
JSONObject featureObject = new JSONObject();
String featureUniqueName = annotationInfo.getUniqueName();
featureObject.put(FeatureStringEnum.UNIQUENAME.getValue(), new JSONString(featureUniqueName));
JSONArray dbXrefJsonArray = new JSONArray();
JSONObject dbXrefJsonObject = new JSONObject();
dbXrefJsonObject.put(FeatureStringEnum.DB.getValue(), new JSONString(dbXrefInfo.getTag()));
dbXrefJsonObject.put(FeatureStringEnum.ACCESSION.getValue(), new JSONString(dbXrefInfo.getValue()));
dbXrefJsonArray.set(0, dbXrefJsonObject);
featureObject.put(FeatureStringEnum.DBXREFS.getValue(), dbXrefJsonArray);
featuresArray.set(0, featureObject);
JSONObject requestObject = new JSONObject();
requestObject.put(FeatureStringEnum.TRACK.getValue(), new JSONString(annotationInfo.getSequence()));
requestObject.put(FeatureStringEnum.FEATURES.getValue(), featuresArray);
RestService.sendRequest(requestCallback, "annotationEditor/deleteDbxref", "data=" + requestObject.toString());
}
public static void getDbXrefs(RequestCallback requestCallback, AnnotationInfo annotationInfo, OrganismInfo organismInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put(FeatureStringEnum.UNIQUENAME.getValue(),new JSONString(annotationInfo.getUniqueName()));
jsonObject.put(FeatureStringEnum.ORGANISM_ID.getValue(),new JSONString(organismInfo.getId()));
RestService.sendRequest(requestCallback, "annotationEditor/getDbxrefs", "data=" + jsonObject.toString());
}
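    // Request payload sketch for addDbXref/deleteDbXref above (values follow the captured
    // payloads in the comments; anything else is illustrative):
    //   data={"track":"ctgA","features":[{"uniquename":"fd57cc6a-8e29-4a48-9832-82c06bcc869c",
    //         "dbxrefs":[{"db":"aasd","accession":"12312"}]}]}
    // posted to annotationEditor/addDbxref or annotationEditor/deleteDbxref respectively.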
}
| 6,796 | 61.357798 | 273 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/GeneProductRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.user.client.ui.Anchor;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.bbop.apollo.gwt.client.dto.GeneProductConverter;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.shared.geneProduct.GeneProduct;
import org.bbop.apollo.gwt.shared.geneProduct.Reference;
import org.bbop.apollo.gwt.shared.geneProduct.WithOrFrom;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ndunn on 1/14/15.
*/
public class GeneProductRestService {
static String TERM_LOOKUP_SERVER = "http://api.geneontology.org/api/ontology/term/"; // ECO%3A0000315
public static List<GeneProduct> generateGeneProducts(AnnotationInfo annotationInfo, JSONArray geneProducts){
List<GeneProduct> geneProductList = new ArrayList<>();
for(int i = 0 ; i < geneProducts.size() ; i++){
JSONObject annotationObject = geneProducts.get(i).isObject();
GeneProduct geneProduct = new GeneProduct();
geneProduct.setFeature(annotationInfo.getUniqueName());
geneProduct.setProductName(annotationObject.get("productName").isString().stringValue());
geneProduct.setEvidenceCode(annotationObject.get("evidenceCode").isString().stringValue());
geneProduct.setEvidenceCodeLabel(annotationObject.get("evidenceCodeLabel").isString().stringValue());
geneProduct.setAlternate(annotationObject.get("alternate").isBoolean().booleanValue());
if(annotationObject.containsKey("reference")){
String[] referenceString = annotationObject.get("reference").isString().stringValue().split(":");
Reference reference = new Reference(referenceString[0], referenceString[1]);
geneProduct.setReference(reference);
}
else{
geneProduct.setReference(Reference.createEmptyReference());
}
if(annotationObject.containsKey("withOrFrom")){
List<WithOrFrom> withOrFromList = new ArrayList<>();
JSONArray goWithOrFromArray = annotationObject.get("withOrFrom").isArray();
if(goWithOrFromArray==null){
String goWithString = annotationObject.get("withOrFrom").isString().stringValue();
goWithOrFromArray = JSONParser.parseStrict(goWithString).isArray();
}
for(int j = 0 ; j < goWithOrFromArray.size() ; j++){
WithOrFrom withOrFrom = new WithOrFrom(goWithOrFromArray.get(j).isString().stringValue());
withOrFromList.add(withOrFrom);
}
geneProduct.setWithOrFromList(withOrFromList);
}
List<String> notesList = new ArrayList<>();
JSONArray notesJsonArray = annotationObject.get("notes").isArray();
if(notesJsonArray==null){
String notes = annotationObject.get("notes").isString().stringValue();
notesJsonArray = JSONParser.parseStrict(notes).isArray();
}
for(int j = 0 ; j < notesJsonArray.size() ; j++){
notesList.add(notesJsonArray.get(j).isString().stringValue());
}
geneProduct.setNoteList(notesList);
geneProductList.add(geneProduct);
}
return geneProductList;
}
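    // Sketch of one element generateGeneProducts expects (keys taken from the parsing above,
    // values illustrative); withOrFrom and notes may arrive as JSON arrays or as JSON-encoded strings:
    //   {"productName":"...","evidenceCode":"ECO:0000315","evidenceCodeLabel":"...",
    //    "alternate":false,"reference":"PMID:...","withOrFrom":["..."],"notes":[]}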
public static void saveGeneProduct(RequestCallback requestCallback, GeneProduct geneProduct) {
RestService.sendRequest(requestCallback, "geneProduct/save", "data=" + GeneProductConverter.convertToJson(geneProduct).toString());
}
public static void updateGeneProduct(RequestCallback requestCallback, GeneProduct geneProduct) {
RestService.sendRequest(requestCallback, "geneProduct/update", "data=" + GeneProductConverter.convertToJson(geneProduct).toString());
}
public static void deleteGeneProduct(RequestCallback requestCallback, GeneProduct geneProduct) {
RestService.sendRequest(requestCallback, "geneProduct/delete", "data=" + GeneProductConverter.convertToJson(geneProduct).toString());
}
public static void getGeneProduct(RequestCallback requestCallback, AnnotationInfo annotationInfo, OrganismInfo organismInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("uniqueName",new JSONString(annotationInfo.getUniqueName()));
jsonObject.put("organism",new JSONString(organismInfo.getId()));
RestService.sendRequest(requestCallback, "geneProduct/", "data=" + jsonObject.toString());
}
private static void lookupTerm(RequestCallback requestCallback, String url) {
RestService.generateBuilder(requestCallback,RequestBuilder.GET,url);
}
public static void lookupTerm(final Anchor anchor, String evidenceCurie) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject returnObject = JSONParser.parseStrict(response.getText()).isObject();
anchor.setHTML(returnObject.get("label").isString().stringValue());
if(returnObject.containsKey("definition")){
anchor.setTitle(returnObject.get("definition").isString().stringValue());
}
else{
anchor.setTitle(returnObject.get("label").isString().stringValue());
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Failed to do lookup: "+exception.getMessage());
}
};
GeneProductRestService.lookupTerm(requestCallback,TERM_LOOKUP_SERVER + evidenceCurie);
}
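    // Usage sketch, assuming some Anchor named evidenceAnchor (the curie value is illustrative):
    //   GeneProductRestService.lookupTerm(evidenceAnchor, "ECO:0000315");
    // sets the anchor HTML to the term label and its title to the definition when one is returned.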
}
| 6,238 | 50.139344 | 141 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/GoRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.*;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.user.client.ui.Anchor;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.bbop.apollo.gwt.client.dto.GoAnnotationConverter;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.shared.go.Aspect;
import org.bbop.apollo.gwt.shared.go.GoAnnotation;
import org.bbop.apollo.gwt.shared.go.Reference;
import org.bbop.apollo.gwt.shared.go.WithOrFrom;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ndunn on 1/14/15.
*/
public class GoRestService {
static String TERM_LOOKUP_SERVER = "http://api.geneontology.org/api/ontology/term/"; // ECO%3A0000315
public static List<GoAnnotation> generateGoAnnotations(AnnotationInfo annotationInfo, JSONArray goAnnotations){
List<GoAnnotation> goAnnotationList = new ArrayList<>();
for(int i = 0 ; i < goAnnotations.size() ; i++){
JSONObject goAnnotationObject = goAnnotations.get(i).isObject();
GoAnnotation goAnnotation = new GoAnnotation();
goAnnotation.setGene(annotationInfo.getUniqueName());
goAnnotation.setAspect(Aspect.valueOf(goAnnotationObject.get("aspect").isString().stringValue()));
goAnnotation.setGoTerm(goAnnotationObject.get("goTerm").isString().stringValue());
goAnnotation.setGoTermLabel(goAnnotationObject.get("goTermLabel").isString().stringValue());
goAnnotation.setGeneRelationship(goAnnotationObject.get("geneRelationship").isString().stringValue());
goAnnotation.setEvidenceCode(goAnnotationObject.get("evidenceCode").isString().stringValue());
goAnnotation.setEvidenceCodeLabel(goAnnotationObject.get("evidenceCodeLabel").isString().stringValue());
goAnnotation.setNegate(goAnnotationObject.get("negate").isBoolean().booleanValue());
if(goAnnotationObject.containsKey("reference")){
String[] referenceString = goAnnotationObject.get("reference").isString().stringValue().split(":");
Reference reference = new Reference(referenceString[0], referenceString[1]);
goAnnotation.setReference(reference);
}
else{
goAnnotation.setReference(Reference.createEmptyReference());
}
List<WithOrFrom> withOrFromList = new ArrayList<>();
if(goAnnotationObject.containsKey("withOrFrom")) {
JSONArray goWithOrFromArray = goAnnotationObject.get("withOrFrom").isArray();
if (goWithOrFromArray == null) {
String goWithString = goAnnotationObject.get("withOrFrom").isString().stringValue();
goWithOrFromArray = JSONParser.parseStrict(goWithString).isArray();
}
for (int j = 0; j < goWithOrFromArray.size(); j++) {
WithOrFrom withOrFrom = new WithOrFrom(goWithOrFromArray.get(j).isString().stringValue());
withOrFromList.add(withOrFrom);
}
}
else{
String jsonString = Reference.UNKNOWN + ":" + Reference.NOT_PROVIDED;
withOrFromList.add(new WithOrFrom(jsonString));
}
goAnnotation.setWithOrFromList(withOrFromList);
List<String> notesList = new ArrayList<>();
JSONArray notesJsonArray = goAnnotationObject.get("notes").isArray();
if(notesJsonArray==null){
String notes = goAnnotationObject.get("notes").isString().stringValue();
notesJsonArray = JSONParser.parseStrict(notes).isArray();
}
for(int j = 0 ; j < notesJsonArray.size() ; j++){
notesList.add(notesJsonArray.get(j).isString().stringValue());
}
goAnnotation.setNoteList(notesList);
goAnnotationList.add(goAnnotation);
}
return goAnnotationList;
}
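    // Sketch of one element generateGoAnnotations expects (keys taken from the parsing above,
    // values illustrative; "aspect" must match an Aspect enum name); withOrFrom and notes may
    // arrive as JSON arrays or as JSON-encoded strings:
    //   {"aspect":"<Aspect name>","goTerm":"GO:...","goTermLabel":"...","geneRelationship":"...",
    //    "evidenceCode":"ECO:0000315","evidenceCodeLabel":"...","negate":false,
    //    "reference":"PMID:...","withOrFrom":["..."],"notes":[]}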
public static void saveGoAnnotation(RequestCallback requestCallback, GoAnnotation goAnnotation) {
RestService.sendRequest(requestCallback, "goAnnotation/save", "data=" + GoAnnotationConverter.convertToJson(goAnnotation).toString());
}
public static void updateGoAnnotation(RequestCallback requestCallback, GoAnnotation goAnnotation) {
RestService.sendRequest(requestCallback, "goAnnotation/update", "data=" + GoAnnotationConverter.convertToJson(goAnnotation).toString());
}
public static void deleteGoAnnotation(RequestCallback requestCallback, GoAnnotation goAnnotation) {
RestService.sendRequest(requestCallback, "goAnnotation/delete", "data=" + GoAnnotationConverter.convertToJson(goAnnotation).toString());
}
public static void getGoAnnotation(RequestCallback requestCallback, AnnotationInfo annotationInfo, OrganismInfo organismInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("uniqueName",new JSONString(annotationInfo.getUniqueName()));
jsonObject.put("organism",new JSONString(organismInfo.getId()));
RestService.sendRequest(requestCallback, "goAnnotation/", "data=" + jsonObject.toString());
}
private static void lookupTerm(RequestCallback requestCallback, String url) {
RestService.generateBuilder(requestCallback,RequestBuilder.GET,url);
}
public static void lookupTerm(final Anchor anchor, String evidenceCurie) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject returnObject = JSONParser.parseStrict(response.getText()).isObject();
anchor.setHTML(returnObject.get("label").isString().stringValue());
if(returnObject.containsKey("definition")){
anchor.setTitle(returnObject.get("definition").isString().stringValue());
}
else{
anchor.setTitle(returnObject.get("label").isString().stringValue());
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Failed to do lookup: "+exception.getMessage());
}
};
GoRestService.lookupTerm(requestCallback,TERM_LOOKUP_SERVER + evidenceCurie);
}
}
| 6,665 | 48.746269 | 144 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/GroupRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.json.client.*;
import com.google.gwt.user.client.Window;
import org.bbop.apollo.gwt.client.Annotator;
import org.bbop.apollo.gwt.client.AnnotatorPanel;
import org.bbop.apollo.gwt.client.dto.GroupInfo;
import org.bbop.apollo.gwt.client.dto.GroupOrganismPermissionInfo;
import org.bbop.apollo.gwt.client.dto.UserInfo;
import org.bbop.apollo.gwt.client.event.GroupChangeEvent;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import org.gwtbootstrap3.extras.select.client.ui.Option;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
* Created by ndunn on 3/30/15.
*/
public class GroupRestService {
public static void loadGroups(final List<GroupInfo> groupInfoList) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
groupInfoList.clear();
JSONValue returnValue = JSONParser.parseStrict(response.getText());
JSONArray array = returnValue.isArray();
for (int i = 0; array != null && i < array.size(); i++) {
JSONObject object = array.get(i).isObject();
GroupInfo groupInfo = new GroupInfo();
groupInfo.setId((long) object.get("id").isNumber().doubleValue());
groupInfo.setName(object.get("name").isString().stringValue());
groupInfo.setNumberOfUsers((int) object.get("numberOfUsers").isNumber().doubleValue());
Integer numberOfAdmin = 0;
if (object.get("numberOfAdmin") != null) {
numberOfAdmin = (int) object.get("numberOfAdmin").isNumber().doubleValue();
}
groupInfo.setNumberOfAdmin(numberOfAdmin);
List<UserInfo> userInfoList = new ArrayList<>();
List<UserInfo> adminInfoList = new ArrayList<>();
if (object.get("users") != null) {
JSONArray usersArray = object.get("users").isArray();
for (int j = 0; j < usersArray.size(); j++) {
JSONObject userObject = usersArray.get(j).isObject();
UserInfo userInfo = new UserInfo(userObject);
userInfoList.add(userInfo);
}
}
groupInfo.setUserInfoList(userInfoList);
if (object.get("admin") != null) {
JSONArray adminArray = object.get("admin").isArray();
for (int j = 0; j < adminArray.size(); j++) {
JSONObject userObject = adminArray.get(j).isObject();
UserInfo adminInfo = new UserInfo(userObject);
adminInfoList.add(adminInfo);
}
}
groupInfo.setAdminInfoList(adminInfoList);
// TODO: use shared permission enums
JSONArray organismArray = object.get("organismPermissions").isArray();
Map<String, GroupOrganismPermissionInfo> organismPermissionMap = new TreeMap<>();
for (int j = 0; j < organismArray.size(); j++) {
JSONObject organismPermissionJsonObject = organismArray.get(j).isObject();
GroupOrganismPermissionInfo groupOrganismPermissionInfo = new GroupOrganismPermissionInfo();
if (organismPermissionJsonObject.get("id") != null) {
groupOrganismPermissionInfo.setId((long) organismPermissionJsonObject.get("id").isNumber().doubleValue());
}
groupOrganismPermissionInfo.setGroupId((long) organismPermissionJsonObject.get("groupId").isNumber().doubleValue());
groupOrganismPermissionInfo.setOrganismName(organismPermissionJsonObject.get("organism").isString().stringValue());
if (organismPermissionJsonObject.get("permissions") != null) {
JSONArray permissionsArray = JSONParser.parseStrict(organismPermissionJsonObject.get("permissions").isString().stringValue()).isArray();
for (int permissionIndex = 0; permissionIndex < permissionsArray.size(); ++permissionIndex) {
String permission = permissionsArray.get(permissionIndex).isString().stringValue();
switch (permission) {
case "ADMINISTRATE":
groupOrganismPermissionInfo.setAdmin(true);
break;
case "WRITE":
groupOrganismPermissionInfo.setWrite(true);
break;
case "EXPORT":
groupOrganismPermissionInfo.setExport(true);
break;
case "READ":
groupOrganismPermissionInfo.setRead(true);
break;
default:
Bootbox.alert("Unsure how to handle this permission '" + permission + "'");
}
}
}
organismPermissionMap.put(groupOrganismPermissionInfo.getOrganismName(), groupOrganismPermissionInfo);
}
groupInfo.setOrganismPermissionMap(organismPermissionMap);
groupInfoList.add(groupInfo);
}
Annotator.eventBus.fireEvent(new GroupChangeEvent(GroupChangeEvent.Action.GROUPS_RELOADED));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("error retrieving groups");
}
};
RestService.sendRequest(requestCallback, "group/loadGroups/");
}
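    // Shape of one organismPermissions element consumed above (values illustrative); note that
    // "permissions" is a JSON-encoded string holding an array of permission names:
    //   {"id":1,"groupId":2,"organism":"<organism name>","permissions":"[\"READ\",\"EXPORT\"]"}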
public static void updateGroup(final GroupInfo selectedGroupInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
Annotator.eventBus.fireEvent(new GroupChangeEvent(GroupChangeEvent.Action.RELOAD_GROUPS));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("error updating group " + selectedGroupInfo.getName() + " " + exception);
}
};
RestService.sendRequest(requestCallback, "group/updateGroup/", "data=" + selectedGroupInfo.toJSON().toString());
}
public static void deleteGroup(final GroupInfo selectedGroupInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
Annotator.eventBus.fireEvent(new GroupChangeEvent(GroupChangeEvent.Action.RELOAD_GROUPS));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("error updating group " + selectedGroupInfo.getName() + " " + exception);
}
};
RestService.sendRequest(requestCallback, "group/deleteGroup/", "data=" + selectedGroupInfo.toJSON().toString());
}
public static void addNewGroup(final GroupInfo selectedGroupInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
Annotator.eventBus.fireEvent(new GroupChangeEvent(GroupChangeEvent.Action.ADD_GROUP));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("error updating group " + selectedGroupInfo.getName() + " " + exception);
}
};
RestService.sendRequest(requestCallback, "group/createGroup/", "data=" + selectedGroupInfo.toJSON().toString());
}
public static void updateOrganismPermission(GroupOrganismPermissionInfo object) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
GWT.log("success");
// loadUsers(userInfoList);
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error updating permissions: " + exception);
}
};
RestService.sendRequest(requestCallback, "group/updateOrganismPermission", "data=" + object.toJSON());
}
public static void updateUserGroups(RequestCallback requestCallback, GroupInfo selectedGroupInfo, List<Option> selectedValues) {
// RestService.sendRequest(requestCallback, "group/updateMembership", "data=" + object.toJSON());
JSONObject jsonObject = new JSONObject();
jsonObject.put("groupId", new JSONNumber(selectedGroupInfo.getId()));
JSONArray userArray = new JSONArray();
for (Option userData : selectedValues) {
String emailValue = userData.getValue().split("\\(")[1].trim();
emailValue = emailValue.substring(0, emailValue.length() - 1);
userArray.set(userArray.size(), new JSONString(emailValue));
}
jsonObject.put("users", userArray);
RestService.sendRequest(requestCallback, "group/updateMembership", "data=" + jsonObject);
}
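    // Payload sketch sent to group/updateMembership; the Option values are assumed to be of the
    // form "Full Name (email)", which the split above relies on (values illustrative):
    //   data={"groupId":42,"users":["[email protected]"]}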
public static void updateGroupAdmin(RequestCallback requestCallback, GroupInfo selectedGroupInfo, List<Option> selectedValues) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("groupId", new JSONNumber(selectedGroupInfo.getId()));
JSONArray userArray = new JSONArray();
for (Option userData : selectedValues) {
String emailValue = userData.getValue().split("\\(")[1].trim();
emailValue = emailValue.substring(0, emailValue.length() - 1);
userArray.set(userArray.size(), new JSONString(emailValue));
}
jsonObject.put("users", userArray);
RestService.sendRequest(requestCallback, "group/updateGroupAdmin", "data=" + jsonObject);
}
}
| 11,005 | 47.484581 | 164 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/OrganismRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.*;
import com.google.gwt.json.client.*;
import com.google.gwt.user.client.Window;
import org.bbop.apollo.gwt.client.Annotator;
import org.bbop.apollo.gwt.client.ErrorDialog;
import org.bbop.apollo.gwt.client.LoadingDialog;
import org.bbop.apollo.gwt.client.MainPanel;
import org.bbop.apollo.gwt.client.dto.AppInfoConverter;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.client.dto.OrganismInfoConverter;
import org.bbop.apollo.gwt.client.event.OrganismChangeEvent;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import org.gwtbootstrap3.extras.bootbox.client.callback.ConfirmCallback;
import java.util.List;
/**
* Created by ndunn on 1/14/15.
*/
public class OrganismRestService {
public static void loadOrganisms(RequestCallback requestCallback) {
RestService.sendRequest(requestCallback, "organism/findAllOrganisms");
}
public static void loadOrganisms(Boolean publicOnly, Boolean showObsoletes, RequestCallback requestCallback) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("publicOnly",JSONBoolean.getInstance(publicOnly));
jsonObject.put("showObsolete",JSONBoolean.getInstance(showObsoletes));
RestService.sendRequest(requestCallback, "organism/findAllOrganisms","data="+jsonObject.toString());
}
public static void loadOrganisms(final List<OrganismInfo> organismInfoList) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
organismInfoList.clear();
organismInfoList.addAll(OrganismInfoConverter.convertJSONStringToOrganismInfoList(response.getText()));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error loading organisms");
}
};
loadOrganisms(requestCallback);
}
public static void updateOrganismInfo(final OrganismInfo organismInfo,boolean forceReload) {
final LoadingDialog loadingDialog = new LoadingDialog("Updating Organism Information");
JSONObject organismInfoObject = organismInfo.toJSON();
organismInfoObject.put("forceReload",JSONBoolean.getInstance(forceReload));
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
loadingDialog.hide();
JSONValue jsonValue = JSONParser.parseStrict(response.getText());
                if(jsonValue.isObject()!=null && jsonValue.isObject().containsKey(FeatureStringEnum.ERROR.getValue())){
String errorMessage = jsonValue.isObject().get(FeatureStringEnum.ERROR.getValue()).isString().stringValue();
ErrorDialog errorDialog = new ErrorDialog("Unable to update the organism",errorMessage,true,true);
}
else{
OrganismChangeEvent organismChangeEvent = new OrganismChangeEvent(OrganismChangeEvent.Action.LOADED_ORGANISMS);
List<OrganismInfo> organismInfoList = OrganismInfoConverter.convertJSONStringToOrganismInfoList(response.getText());
organismChangeEvent.setOrganismInfoList(organismInfoList);
Annotator.eventBus.fireEvent(organismChangeEvent);
Bootbox.confirm("Organism updated. Reload?", new ConfirmCallback() {
@Override
public void callback(boolean result) {
if(result) Window.Location.reload();
}
});
}
}
@Override
public void onError(Request request, Throwable exception) {
loadingDialog.hide();
Bootbox.alert("error updating organism info: "+exception);
}
};
RestService.sendRequest(requestCallback, "organism/updateOrganismInfo", "data=" + organismInfoObject.toString());
}
public static void createOrganism(RequestCallback requestCallback, OrganismInfo organismInfo) {
RestService.sendRequest(requestCallback,"organism/addOrganism", OrganismInfoConverter.convertOrganismInfoToJSONObject(organismInfo));
}
public static void deleteOrganism(RequestCallback requestCallback, OrganismInfo organismInfo) {
RestService.sendRequest(requestCallback,"organism/deleteOrganism", OrganismInfoConverter.convertOrganismInfoToJSONObject(organismInfo));
}
public static void removeTrack(RequestCallback requestCallback, OrganismInfo organismInfo,String trackName) {
JSONObject data = new JSONObject();
JSONObject organismObject = OrganismInfoConverter.convertOrganismInfoToJSONObject(organismInfo);
data.put(FeatureStringEnum.ORGANISM.getValue(),organismObject);
data.put(FeatureStringEnum.TRACK_LABEL.getValue(),new JSONString(trackName));
RestService.sendRequest(requestCallback,"organism/removeTrackFromOrganism", data);
}
public static void switchOrganismById(String newOrganismId) {
final LoadingDialog loadingDialog = new LoadingDialog();
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject returnValue = JSONParser.parseStrict(response.getText()).isObject();
MainPanel.getInstance().setAppState(AppInfoConverter.convertFromJson(returnValue));
loadingDialog.hide();
}
@Override
public void onError(Request request, Throwable exception) {
loadingDialog.hide();
Bootbox.alert("Error changing organisms");
}
};
RestService.sendRequest(requestCallback,"annotator/setCurrentOrganism/"+newOrganismId);
}
public static void switchSequenceById(String newSequenceId) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject returnValue = JSONParser.parseStrict(response.getText()).isObject();
MainPanel.getInstance().setAppState(AppInfoConverter.convertFromJson(returnValue));
OrganismChangeEvent organismChangeEvent = new OrganismChangeEvent(OrganismChangeEvent.Action.LOADED_ORGANISMS);
Annotator.eventBus.fireEvent(organismChangeEvent);
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error changing organisms: "+exception.getMessage());
}
};
RestService.sendRequest(requestCallback,"annotator/setCurrentSequence/"+ newSequenceId);
}
public static void updateOfficialTrack(RequestCallback requestCallback,OrganismInfo organismInfo, String trackName, boolean officialTrack) {
JSONObject data = new JSONObject();
JSONObject organismObject = OrganismInfoConverter.convertOrganismInfoToJSONObject(organismInfo);
data.put(FeatureStringEnum.ORGANISM.getValue(),organismObject);
data.put(FeatureStringEnum.ID.getValue(),organismObject.get(FeatureStringEnum.ID.getValue()));
data.put(FeatureStringEnum.TRACK_LABEL.getValue(),new JSONString(trackName));
String command = "";
if(trackName==null || trackName.trim().length()==0){
command = "CLEAR";
}
else
if(officialTrack){
command = "ADD";
}
else{
command = "REMOVE";
}
data.put("trackCommand",new JSONString(command));
RestService.sendRequest(requestCallback,"organism/updateOfficialGeneSetTrack", data);
}
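    // trackCommand resolution used above: a null or blank trackName maps to "CLEAR"; otherwise
    // officialTrack=true maps to "ADD" and officialTrack=false maps to "REMOVE".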
}
| 8,042 | 46.035088 | 149 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/ProvenanceRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.user.client.ui.Anchor;
import org.bbop.apollo.gwt.client.dto.AnnotationInfo;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.client.dto.ProvenanceConverter;
import org.bbop.apollo.gwt.shared.provenance.Reference;
import org.bbop.apollo.gwt.shared.provenance.WithOrFrom;
import org.bbop.apollo.gwt.shared.provenance.Provenance;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ndunn on 1/14/15.
*/
public class ProvenanceRestService {
static String TERM_LOOKUP_SERVER = "http://api.geneontology.org/api/ontology/term/"; // ECO%3A0000315
public static List<Provenance> generateProvenances(AnnotationInfo annotationInfo, JSONArray provenances){
List<Provenance> provenanceList = new ArrayList<>();
for(int i = 0 ; i < provenances.size() ; i++){
JSONObject provenanceObject = provenances.get(i).isObject();
Provenance provenance = new Provenance();
provenance.setFeature(annotationInfo.getUniqueName());
provenance.setField(provenanceObject.get("field").isString().stringValue());
provenance.setEvidenceCode(provenanceObject.get("evidenceCode").isString().stringValue());
provenance.setEvidenceCodeLabel(provenanceObject.get("evidenceCodeLabel").isString().stringValue());
if(provenanceObject.containsKey("reference")){
String[] referenceString = provenanceObject.get("reference").isString().stringValue().split(":");
Reference reference = new Reference(referenceString[0], referenceString[1]);
provenance.setReference(reference);
}
else{
provenance.setReference(Reference.createEmptyReference());
}
List<WithOrFrom> withOrFromList = new ArrayList<>();
if(provenanceObject.containsKey("withOrFrom")) {
JSONArray goWithOrFromArray = provenanceObject.get("withOrFrom").isArray();
if (goWithOrFromArray == null) {
String goWithString = provenanceObject.get("withOrFrom").isString().stringValue();
goWithOrFromArray = JSONParser.parseStrict(goWithString).isArray();
}
for (int j = 0; j < goWithOrFromArray.size(); j++) {
WithOrFrom withOrFrom = new WithOrFrom(goWithOrFromArray.get(j).isString().stringValue());
withOrFromList.add(withOrFrom);
}
provenance.setWithOrFromList(withOrFromList);
}
List<String> notesList = new ArrayList<>();
JSONArray notesJsonArray = provenanceObject.get("notes").isArray();
if(notesJsonArray==null){
String notes = provenanceObject.get("notes").isString().stringValue();
notesJsonArray = JSONParser.parseStrict(notes).isArray();
}
for(int j = 0 ; j < notesJsonArray.size() ; j++){
notesList.add(notesJsonArray.get(j).isString().stringValue());
}
provenance.setNoteList(notesList);
provenanceList.add(provenance);
}
return provenanceList;
}
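    // Sketch of one element generateProvenances expects (keys taken from the parsing above,
    // values illustrative); withOrFrom and notes may arrive as JSON arrays or as JSON-encoded strings:
    //   {"field":"...","evidenceCode":"ECO:0000315","evidenceCodeLabel":"...",
    //    "reference":"PMID:...","withOrFrom":["..."],"notes":[]}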
public static void saveProvenance(RequestCallback requestCallback, Provenance provenance) {
RestService.sendRequest(requestCallback, "provenance/save", "data=" + ProvenanceConverter.convertToJson(provenance).toString());
}
public static void updateProvenance(RequestCallback requestCallback, Provenance provenance) {
RestService.sendRequest(requestCallback, "provenance/update", "data=" + ProvenanceConverter.convertToJson(provenance).toString());
}
public static void deleteProvenance(RequestCallback requestCallback, Provenance provenance) {
RestService.sendRequest(requestCallback, "provenance/delete", "data=" + ProvenanceConverter.convertToJson(provenance).toString());
}
public static void getProvenance(RequestCallback requestCallback, AnnotationInfo annotationInfo, OrganismInfo organismInfo) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("uniqueName",new JSONString(annotationInfo.getUniqueName()));
jsonObject.put("organism",new JSONString(organismInfo.getId()));
RestService.sendRequest(requestCallback, "provenance/", "data=" + jsonObject.toString());
}
private static void lookupTerm(RequestCallback requestCallback, String url) {
RestService.generateBuilder(requestCallback,RequestBuilder.GET,url);
}
public static void lookupTerm(final Anchor anchor, String evidenceCurie) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject returnObject = JSONParser.parseStrict(response.getText()).isObject();
anchor.setHTML(returnObject.get("label").isString().stringValue());
if(returnObject.containsKey("definition")){
anchor.setTitle(returnObject.get("definition").isString().stringValue());
}
else{
anchor.setTitle(returnObject.get("label").isString().stringValue());
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Failed to do lookup: "+exception.getMessage());
}
};
ProvenanceRestService.lookupTerm(requestCallback,TERM_LOOKUP_SERVER + evidenceCurie);
}
}
| 6,085 | 46.92126 | 138 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/ProxyRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.RequestCallback;
/**
* Created by ndunn on 1/14/15.
*/
public class ProxyRestService {
public static void findPubMedId(RequestCallback requestCallback,String pmidNumber) {
RestService.sendRequest(requestCallback, "ncbiProxyService?db=pubmed&operation=fetch&id="+pmidNumber);
}
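    // Usage sketch, assuming some RequestCallback named callback (the PMID is illustrative):
    //   ProxyRestService.findPubMedId(callback, "12345678");
    // which proxies an NCBI efetch call through ncbiProxyService.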
}
| 1,222 | 33.942857 | 110 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/RestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.http.client.RequestBuilder;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.RequestException;
import com.google.gwt.http.client.URL;
import com.google.gwt.json.client.JSONObject;
import org.bbop.apollo.gwt.client.Annotator;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
/**
* Created by ndunn on 1/14/15.
*/
public class RestService {
public static void sendRequest(RequestCallback requestCallback, String url) {
sendRequest(requestCallback, url, (String) null);
}
public static void sendRequest(RequestCallback requestCallback, String url, JSONObject jsonObject) {
sendRequest(requestCallback, url, "data=" + jsonObject.toString());
}
public static void sendRequest(RequestCallback requestCallback, String url, String data) {
sendRequest(requestCallback, url, data, RequestBuilder.POST);
}
public static String fixUrl(String url) {
String rootUrl = Annotator.getRootUrl();
if (!url.startsWith(rootUrl)) {
url = rootUrl + url;
}
// add the clientToken parameter if not exists
if (!url.contains(FeatureStringEnum.CLIENT_TOKEN.getValue())) {
url += url.contains("?") ? "&" : "?";
url += FeatureStringEnum.CLIENT_TOKEN.getValue();
url += "=";
url += Annotator.getClientToken();
}
return url;
}
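    // fixUrl sketch (the root URL, client-token key and token value depend on the running instance):
    //   "organism/findAllOrganisms" -> rootUrl + "organism/findAllOrganisms?" + clientTokenKey + "=" + token
    // a URL that already carries a query string gets "&" + clientTokenKey + "=" + token appended instead.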
public static void sendRequest(RequestCallback requestCallback, String url, String data, RequestBuilder.Method method) {
url = fixUrl(url);
        generateBuilder(requestCallback, method, url, data);
}
public static void sendGetRequest(RequestCallback requestCallback, String url) {
sendRequest(requestCallback, url, null, RequestBuilder.GET);
}
public static RequestBuilder generateBuilder(RequestCallback requestCallback, RequestBuilder.Method method, String url) {
return generateBuilder(requestCallback,method,url,null);
}
public static RequestBuilder generateBuilder(RequestCallback requestCallback, RequestBuilder.Method method, String url, String data) {
RequestBuilder builder = new RequestBuilder(method, URL.encode(url));
if (data != null) {
builder.setRequestData(data);
}
builder.setHeader("Content-type", "application/x-www-form-urlencoded");
builder.setHeader("Accept", "application/json");
try {
if (requestCallback != null) {
builder.setCallback(requestCallback);
}
builder.send();
} catch (RequestException e) {
Bootbox.alert(e.getMessage());
}
return builder;
}
}
| 2,850 | 36.513158 | 138 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/SearchRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
/**
 * REST calls for the sequence search tools exposed through the annotation editor.
 */
public class SearchRestService {
public static void getTools(RequestCallback requestCallback) {
RestService.sendRequest(requestCallback, "annotationEditor/getSequenceSearchTools");
}
public static void searchSequence(RequestCallback requestCallback, String searchToolKey , String residues, String databaseId) {
// RestService.sendRequest(requestCallback, "organism/updateOrganismInfo", "data=" + organismInfoObject.toString());
JSONObject searchObject = new JSONObject();
JSONObject searchKey = new JSONObject();
searchKey.put("key",new JSONString(searchToolKey));
searchKey.put("residues",new JSONString(residues));
if(databaseId!=null){
searchKey.put("database_id",new JSONString(databaseId));
}
searchObject.put("search",searchKey);
RestService.sendRequest(requestCallback, "annotationEditor/searchSequence","data="+searchObject.toString());
}
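    // Payload sketch sent to annotationEditor/searchSequence (tool key, residues and database id
    // are illustrative; database_id is only included when non-null):
    //   data={"search":{"key":"<searchToolKey>","residues":"ATGC...","database_id":"<databaseId>"}}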
}
| 1,144 | 32.676471 | 129 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/SequenceRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.json.client.*;
import org.bbop.apollo.gwt.client.Annotator;
import org.bbop.apollo.gwt.client.ExportPanel;
import org.bbop.apollo.gwt.client.SequencePanel;
import org.bbop.apollo.gwt.client.dto.OrganismInfo;
import org.bbop.apollo.gwt.client.dto.SequenceInfo;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
/**
* Created by ndunn on 1/14/15.
*/
public class SequenceRestService {
public static void setCurrentSequence(RequestCallback requestCallback, SequenceInfo sequenceInfo) {
RestService.sendRequest(requestCallback, "sequence/setCurrentSequence/" + sequenceInfo.getId());
}
public static void setCurrentSequenceForString(RequestCallback requestCallback, String sequenceName, OrganismInfo organismInfo) {
RestService.sendRequest(requestCallback, "sequence/setCurrentSequenceForNameAndOrganism/" +organismInfo.getId() +"?sequenceName="+sequenceName);
}
public static void generateLink(final ExportPanel exportPanel) {
JSONObject jsonObject = new JSONObject();
String type = exportPanel.getType();
jsonObject.put("type", new JSONString(exportPanel.getType()));
jsonObject.put("exportAllSequences", new JSONString(exportPanel.getExportAll().toString()));
if (type.equals(FeatureStringEnum.TYPE_CHADO.getValue())) {
jsonObject.put("chadoExportType", new JSONString(exportPanel.getChadoExportType()));
jsonObject.put("seqType", new JSONString(""));
jsonObject.put("exportGff3Fasta", new JSONString(""));
jsonObject.put("output", new JSONString(""));
jsonObject.put("format", new JSONString(""));
}
else if (type.equals(FeatureStringEnum.TYPE_VCF.getValue())) {
GWT.log("type is TYPE_VCF");
jsonObject.put("output", new JSONString("file"));
jsonObject.put("format", new JSONString("gzip"));
jsonObject.put("seqType", new JSONString(""));
jsonObject.put("exportGff3Fasta", new JSONString(""));
jsonObject.put("chadoExportType", new JSONString(""));
}
else if (type.equals(FeatureStringEnum.TYPE_JBROWSE.getValue())) {
GWT.log("type is TYPE_JBROWSE");
jsonObject.put("output", new JSONString("file"));
jsonObject.put("format", new JSONString("gzip"));
jsonObject.put("seqType", new JSONString(""));
jsonObject.put("exportGff3Fasta", new JSONString(""));
jsonObject.put("chadoExportType", new JSONString(""));
jsonObject.put("exportJBrowseSequence", JSONBoolean.getInstance(exportPanel.getExportJBrowseSequence()));
jsonObject.put("exportToThisOrganism", JSONBoolean.getInstance(exportPanel.getExportToThisOrganism()));
}
else if (type.equals(FeatureStringEnum.TYPE_GO.getValue())) {
jsonObject.put("chadoExportType", new JSONString(""));
jsonObject.put("seqType", new JSONString(exportPanel.getSequenceType()));
jsonObject.put("exportGff3Fasta", new JSONString(exportPanel.getExportGff3Fasta().toString()));
jsonObject.put("output", new JSONString("file"));
jsonObject.put("format", new JSONString("gzip"));
}
else {
jsonObject.put("chadoExportType", new JSONString(""));
jsonObject.put("seqType", new JSONString(exportPanel.getSequenceType()));
jsonObject.put("exportGff3Fasta", new JSONString(exportPanel.getExportGff3Fasta().toString()));
if(exportPanel.getRegion()!=null){
jsonObject.put("region", new JSONString(exportPanel.getRegion()));
}
jsonObject.put("output", new JSONString("file"));
jsonObject.put("format", new JSONString("gzip"));
}
JSONArray jsonArray = new JSONArray();
for (SequenceInfo sequenceInfo : exportPanel.getSequenceList()) {
jsonArray.set(jsonArray.size(), new JSONString(sequenceInfo.getName()));
}
jsonObject.put("sequences", jsonArray);
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject responseObject = JSONParser.parseStrict(response.getText()).isObject();
GWT.log("Received response: "+responseObject.toString());
String uuid = responseObject.get("uuid").isString().stringValue();
String exportType = responseObject.get("exportType").isString().stringValue();
String sequenceType = responseObject.get("seqType").isString().stringValue();
String exportUrl = Annotator.getRootUrl() + "IOService/download?uuid=" + uuid + "&exportType=" + exportType + "&seqType=" + sequenceType+"&format=gzip";
exportPanel.setExportUrl(exportUrl);
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error: " + exception);
}
};
RequestCallback requestCallbackForChadoExport = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject responseObject = JSONParser.parseStrict(response.getText()).isObject();
exportPanel.showExportStatus(responseObject.toString());
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error: " + exception);
}
};
if (type.equals(FeatureStringEnum.TYPE_CHADO.getValue())) {
RestService.sendRequest(requestCallbackForChadoExport, "IOService/write", "data=" + jsonObject.toString());
}
else {
RestService.sendRequest(requestCallback, "IOService/write", "data=" + jsonObject.toString());
}
}
public static void setCurrentSequenceAndLocation(RequestCallback requestCallback, String sequenceNameString, Integer start, Integer end) {
setCurrentSequenceAndLocation(requestCallback,sequenceNameString,start,end,false) ;
}
public static void setCurrentSequenceAndLocation(RequestCallback requestCallback, String sequenceNameString, Integer start, Integer end,boolean suppressOutput) {
String url = "sequence/setCurrentSequenceLocation/?name=" + sequenceNameString + "&start=" + start + "&end=" + end;
if(suppressOutput){
url += "&suppressOutput=true";
}
RestService.sendRequest(requestCallback, url);
}
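    // Resulting request URL sketch (sequence name and coordinates are illustrative):
    //   sequence/setCurrentSequenceLocation/?name=ctgA&start=100&end=2000&suppressOutput=true
    // the suppressOutput parameter is only appended when requested.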
public static void getSequenceForOffsetAndMax(RequestCallback requestCallback, String text, int start, int length, String sortBy,Boolean sortNameAscending, String minFeatureLengthText, String maxFeatureLengthText) {
String searchString = "sequence/getSequences/?name=" + text + "&start=" + start + "&length=" + length ;
if(sortBy!=null && sortBy.length()>1){
searchString += "&sort="+sortBy+"&asc=" + sortNameAscending;
}
try {
searchString += "&minFeatureLength=" + Integer.parseInt(minFeatureLengthText);
} catch (NumberFormatException nfe) {
//
}
try {
searchString += "&maxFeatureLength=" + Integer.parseInt(maxFeatureLengthText);
} catch (NumberFormatException nfe) {
//
}
RestService.sendRequest(requestCallback, searchString);
}
public static void getChadoExportStatus(final SequencePanel sequencePanel) {
String requestUrl = Annotator.getRootUrl() + "IOService/chadoExportStatus";
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONObject responseObject = JSONParser.parseStrict(response.getText()).isObject();
sequencePanel.setChadoExportStatus(responseObject.get("export_status").isString().stringValue());
}
@Override
public void onError(Request request, Throwable exception) {
sequencePanel.setChadoExportStatus("false");
}
};
RestService.sendRequest(requestCallback, requestUrl);
}
}
| 8,622 | 49.426901 | 219 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/rest/UserRestService.java
|
package org.bbop.apollo.gwt.client.rest;
import com.google.gwt.core.client.GWT;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.Response;
import com.google.gwt.http.client.URL;
import com.google.gwt.json.client.*;
import com.google.gwt.user.client.Window;
import org.bbop.apollo.gwt.client.Annotator;
import org.bbop.apollo.gwt.client.ErrorDialog;
import org.bbop.apollo.gwt.client.LoginDialog;
import org.bbop.apollo.gwt.client.dto.UserInfo;
import org.bbop.apollo.gwt.client.dto.UserInfoConverter;
import org.bbop.apollo.gwt.client.dto.UserOrganismPermissionInfo;
import org.bbop.apollo.gwt.client.event.UserChangeEvent;
import org.bbop.apollo.gwt.shared.FeatureStringEnum;
import org.gwtbootstrap3.extras.bootbox.client.Bootbox;
import org.gwtbootstrap3.extras.bootbox.client.callback.ConfirmCallback;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ndunn on 1/14/15.
*/
public class UserRestService {
public static void login(RequestCallback requestCallback, JSONObject data) {
RestService.sendRequest(requestCallback, "Login", data.toString());
}
public static void registerAdmin(RequestCallback requestCallback, JSONObject data) {
RestService.sendRequest(requestCallback, "login/registerAdmin", data);
}
public static void login(String username, String password, Boolean rememberMe, final LoginDialog loginDialog) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONValue j = null;
try {
j = JSONParser.parseStrict(response.getText());
} catch (Exception e) {
GWT.log("Error parsing login response: " + e);
// Window.alert("Error parsing login response, reloading");
Window.Location.reload();
return;
}
JSONObject o = j.isObject();
if (o.get("error") != null) {
loginDialog.setError(o.get("error").isString().stringValue() + "!");
} else {
Window.Location.reload();
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error loading organisms");
}
};
String passwordString = URL.encodeQueryString(password);
JSONObject jsonObject = new JSONObject();
jsonObject.put("operation", new JSONString("login"));
jsonObject.put("username", new JSONString(username));
jsonObject.put("password", new JSONString(passwordString));
jsonObject.put("rememberMe", JSONBoolean.getInstance(rememberMe));
login(requestCallback, jsonObject);
}
public static void loadUsers(RequestCallback requestCallback) {
loadUsers(requestCallback, -1, -1, "", "name", true, false);
}
public static void loadUsers(RequestCallback requestCallback, Integer start, Integer length, String searchNameString, String searchColumnString, Boolean sortAscending, Boolean showInactiveUsers) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("start", new JSONNumber(start < 0 ? 0 : start));
jsonObject.put("length", new JSONNumber(length < 0 ? 1000 : length));
jsonObject.put("name", new JSONString(searchNameString));
jsonObject.put("sortColumn", new JSONString(searchColumnString));
jsonObject.put("sortAscending", JSONBoolean.getInstance(sortAscending));
jsonObject.put("showInactiveUsers", JSONBoolean.getInstance(showInactiveUsers));
RestService.sendRequest(requestCallback, "user/loadUsers/", jsonObject);
}
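    // Payload sketch sent to user/loadUsers/ using the defaults from the no-argument overload above:
    //   data={"start":0,"length":1000,"name":"","sortColumn":"name","sortAscending":true,
    //         "showInactiveUsers":false}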
public static void loadUsers(final List<UserInfo> userInfoList) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONValue returnValue = JSONParser.parseStrict(response.getText());
JSONArray array = returnValue.isArray();
userInfoList.clear();
for (int i = 0; array != null && i < array.size(); i++) {
JSONObject object = array.get(i).isObject();
UserInfo userInfo = UserInfoConverter.convertToUserInfoFromJSON(object);
userInfoList.add(userInfo);
}
Annotator.eventBus.fireEvent(new UserChangeEvent(UserChangeEvent.Action.USERS_RELOADED));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error loading organisms");
}
};
loadUsers(requestCallback);
}
public static void logout() {
Bootbox.confirm("Logout?", new ConfirmCallback() {
@Override
public void callback(boolean result) {
if(result){
logout(null);
}
}
});
}
public static void logout(final String redirectUrl) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
if (redirectUrl != null) {
Window.Location.replace(redirectUrl);
} else {
Window.Location.reload();
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error logging out " + exception);
}
};
RestService.sendRequest(requestCallback, "Login?operation=logout");
}
public static void updateUser(RequestCallback requestCallback, UserInfo selectedUserInfo) {
JSONObject jsonObject = selectedUserInfo.toJSON();
RestService.sendRequest(requestCallback, "user/updateUser", "data=" + jsonObject.toString());
}
public static void updateUserTrackPanelPreference(RequestCallback requestCallback, boolean tracklist) {
JSONObject jsonObject = new JSONObject();
jsonObject.put("tracklist", JSONBoolean.getInstance(tracklist));
RestService.sendRequest(requestCallback, "user/updateTrackListPreference", "data=" + jsonObject.toString());
}
public static void activate(final List<UserInfo> userInfoList, UserInfo selectedUserInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONValue v = JSONParser.parseStrict(response.getText());
JSONObject o = v.isObject();
if (o.containsKey(FeatureStringEnum.ERROR.getValue())) {
new ErrorDialog("Error Activiating User", o.get(FeatureStringEnum.ERROR.getValue()).isString().stringValue(), true, true);
} else {
loadUsers(userInfoList);
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error deleting user: " + exception);
}
};
JSONObject jsonObject = selectedUserInfo.toJSON();
RestService.sendRequest(requestCallback, "user/activateUser", "data=" + jsonObject.toString());
}
public static void inactivate(final List<UserInfo> userInfoList, UserInfo selectedUserInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONValue v = JSONParser.parseStrict(response.getText());
JSONObject o = v.isObject();
if (o.containsKey(FeatureStringEnum.ERROR.getValue())) {
new ErrorDialog("Error Inactivating User", o.get(FeatureStringEnum.ERROR.getValue()).isString().stringValue(), true, true);
} else {
loadUsers(userInfoList);
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error inactivating user: " + exception);
}
};
JSONObject jsonObject = selectedUserInfo.toJSON();
RestService.sendRequest(requestCallback, "user/inactivateUser", "data=" + jsonObject.toString());
}
public static void deleteUser(final List<UserInfo> userInfoList, UserInfo selectedUserInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONValue v = JSONParser.parseStrict(response.getText());
JSONObject o = v.isObject();
if (o.containsKey(FeatureStringEnum.ERROR.getValue())) {
new ErrorDialog("Error Deleting User", o.get(FeatureStringEnum.ERROR.getValue()).isString().stringValue(), true, true);
} else {
loadUsers(userInfoList);
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error deleting user: " + exception);
}
};
JSONObject jsonObject = selectedUserInfo.toJSON();
RestService.sendRequest(requestCallback, "user/deleteUser", "data=" + jsonObject.toString());
}
public static void createUser(final List<UserInfo> userInfoList, UserInfo selectedUserInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
JSONValue v = JSONParser.parseStrict(response.getText());
JSONObject o = v.isObject();
if (o.containsKey(FeatureStringEnum.ERROR.getValue())) {
new ErrorDialog("Error Creating User", o.get(FeatureStringEnum.ERROR.getValue()).isString().stringValue(), true, true);
} else {
loadUsers(userInfoList);
}
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error adding user: " + exception);
}
};
JSONObject jsonObject = selectedUserInfo.toJSON();
RestService.sendRequest(requestCallback, "user/createUser", "data=" + jsonObject.toString());
}
public static void removeUserFromGroup(final String groupName, final List<UserInfo> userInfoList, final UserInfo selectedUserInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
List<UserInfo> userInfoList = new ArrayList<>();
userInfoList.add(selectedUserInfo);
Annotator.eventBus.fireEvent(new UserChangeEvent(userInfoList, UserChangeEvent.Action.REMOVE_USER_FROM_GROUP, groupName));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error removing group from user: " + exception);
}
};
JSONObject jsonObject = selectedUserInfo.toJSON();
jsonObject.put("group", new JSONString(groupName));
RestService.sendRequest(requestCallback, "user/removeUserFromGroup", "data=" + jsonObject.toString());
}
public static void addUserToGroup(final String groupName, final UserInfo selectedUserInfo) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
List<UserInfo> userInfoList = new ArrayList<>();
userInfoList.add(selectedUserInfo);
Annotator.eventBus.fireEvent(new UserChangeEvent(userInfoList, UserChangeEvent.Action.ADD_USER_TO_GROUP, groupName));
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error adding group to user: " + exception);
}
};
JSONObject jsonObject = selectedUserInfo.toJSON();
jsonObject.put("group", new JSONString(groupName));
RestService.sendRequest(requestCallback, "user/addUserToGroup", "data=" + jsonObject.toString());
}
public static void updateOrganismPermission(UserOrganismPermissionInfo object) {
RequestCallback requestCallback = new RequestCallback() {
@Override
public void onResponseReceived(Request request, Response response) {
GWT.log("success");
}
@Override
public void onError(Request request, Throwable exception) {
Bootbox.alert("Error updating permissions: " + exception);
}
};
RestService.sendRequest(requestCallback, "user/updateOrganismPermission", "data=" + object.toJSON().toString());
}
}
| 13,378 | 43.448505 | 200 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/client/track/TrackConfigurationTemplate.java
|
package org.bbop.apollo.gwt.client.track;
import com.google.gwt.core.client.GWT;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.user.client.Window;
import org.bbop.apollo.gwt.shared.track.TrackTypeEnum;
public class TrackConfigurationTemplate {
private String storeClass;
private String urlTemplate;
private String label;
private String type;
private String key;
private String category;
private String topLevelFeatures;
private String topType;
private TrackTypeEnum typeEnum;
// key is entered
public TrackConfigurationTemplate(String storeClass,
String urlTemplate,
String label,
String type,
String key,
String category,
TrackTypeEnum typeEnum
) {
this(storeClass,urlTemplate,label,type,key,category,typeEnum,null,null);
}
// public TrackConfigurationTemplate(String storeClass,
// String urlTemplate,
// String label,
// String type,
// String key,
// String category,
// TrackTypeEnum typeEnum,
// String topLevelFeatures){
// this(storeClass,urlTemplate,label,type,key,category,typeEnum,topLevelFeatures,null);
// }
public TrackConfigurationTemplate(String storeClass,
String urlTemplate,
String label,
String type,
String key,
String category,
TrackTypeEnum typeEnum,
String topLevelFeatures,
String topType
) {
this.storeClass = storeClass;
this.urlTemplate = urlTemplate;
this.label = label;
this.type = type;
this.key = key;
this.category = category ;
this.typeEnum = typeEnum ;
if(topType!=null){
this.topType = topType;
}
if(topLevelFeatures!=null){
this.topLevelFeatures = topLevelFeatures;
}
}
JSONObject toJSON() {
JSONObject returnObject = new JSONObject();
returnObject.put("storeClass", new JSONString(this.storeClass));
returnObject.put("urlTemplate", new JSONString(this.urlTemplate));
returnObject.put("label", new JSONString(this.label));
returnObject.put("type", new JSONString(this.type));
JSONObject styleObject = new JSONObject();
styleObject.put("className",new JSONString("feature"));
returnObject.put("style", styleObject);
returnObject.put("key", new JSONString(this.key));
if(topLevelFeatures!=null && topLevelFeatures.trim().length()>0){
returnObject.put("topLevelFeatures", new JSONString(this.topLevelFeatures));
}
if(category!=null && category.trim().length()>0){
returnObject.put("category", new JSONString(this.category));
}
JSONObject apolloDetails = new JSONObject();
apolloDetails.put("source", new JSONString("upload"));
apolloDetails.put("type", new JSONString(this.typeEnum.name()));
if(this.topType!=null){
apolloDetails.put("topType", new JSONString(this.topType));
}
returnObject.put("apollo", apolloDetails);
return returnObject;
}
    // Builds a pseudo-random identifier from random integers in the range 0-26; used as a
    // fallback track label/key when none is supplied.
    static String generateString() {
String returnString = "";
for (int i = 0; i < 10; i++) {
returnString += String.valueOf(Math.round(Math.random() * 26));
}
return returnString;
}
public static JSONObject generateForTypeAndKeyAndCategory(TrackTypeEnum type, String key,String category,String topType) {
String randomFileName = key!=null && key.trim().length()>1 ? key : generateString() ;
switch (type) {
case BAM:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/BAM",
"raw/"+randomFileName.replaceAll(" ","_")+".bam",
randomFileName,
"JBrowse/View/Track/Alignments",
randomFileName,
category,
type
).toJSON();
case BAM_CANVAS:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/BAM",
"raw/"+randomFileName.replaceAll(" ","_")+".bam",
randomFileName,
"JBrowse/View/Track/Alignments2",
randomFileName,
category,
type
).toJSON();
case BIGWIG_HEAT_MAP:
return new TrackConfigurationTemplate(
"JBrowse/Store/BigWig",
"raw/"+randomFileName.replaceAll(" ","_")+".bw",
randomFileName,
"JBrowse/View/Track/Wiggle/Density",
randomFileName,
category,
type
).toJSON();
case BIGWIG_XY:
return new TrackConfigurationTemplate(
"JBrowse/Store/BigWig",
"raw/"+randomFileName.replaceAll(" ","_")+".bw",
randomFileName,
"JBrowse/View/Track/Wiggle/XYPlot",
randomFileName,
category,
type
).toJSON();
case VCF:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/VCFTabix",
"raw/"+randomFileName.replaceAll(" ","_")+".vcf.gz",
randomFileName,
"JBrowse/View/Track/HTMLVariants",
randomFileName,
category,
type
).toJSON();
case VCF_CANVAS:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/VCFTabix",
"raw/"+randomFileName.replaceAll(" ","_")+".vcf.gz",
randomFileName,
"JBrowse/View/Track/CanvasVariants",
randomFileName,
category,
type
).toJSON();
case GFF3:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/GFF3",
"raw/"+randomFileName.replaceAll(" ","_")+".gff",
randomFileName,
"JBrowse/View/Track/HTMLFeatures",
randomFileName,
category,
type,
topType,
topType
).toJSON();
case GFF3_CANVAS:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/GFF3",
"raw/"+randomFileName.replaceAll(" ","_")+".gff",
randomFileName,
"JBrowse/View/Track/CanvasFeatures",
randomFileName,
category,
type,
topType,
topType
).toJSON();
case GFF3_JSON:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/NCList",
"tracks/"+randomFileName.replaceAll(" ","_")+"/{refseq}/trackData.jsonz",
randomFileName,
"JBrowse/View/Track/HTMLFeatures",
randomFileName,
category,
type,
null,
topType
).toJSON();
case GFF3_JSON_CANVAS:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/NCList",
"tracks/"+randomFileName.replaceAll(" ","_")+"/{refseq}/trackData.jsonz",
randomFileName,
"JBrowse/View/Track/CanvasFeatures",
randomFileName,
category,
type,
null,
topType
).toJSON();
case GFF3_TABIX:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/GFF3Tabix",
"raw/"+randomFileName.replaceAll(" ","_")+".gff.gz",
randomFileName,
"JBrowse/View/Track/HTMLFeatures",
randomFileName,
category,
type,
topType,
topType
).toJSON();
case GFF3_TABIX_CANVAS:
return new TrackConfigurationTemplate(
"JBrowse/Store/SeqFeature/GFF3Tabix",
"raw/"+randomFileName.replaceAll(" ","_")+".gff.gz",
randomFileName,
"JBrowse/View/Track/CanvasFeatures",
randomFileName,
category,
type,
topType,
topType
).toJSON();
}
return null;
}
}
| 10,164 | 39.823293 | 126 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/ClientTokenGenerator.java
|
package org.bbop.apollo.gwt.shared;
import java.util.Random;
/**
* Created by ndunn on 4/15/16.
*/
public class ClientTokenGenerator {
private final static Random random = new Random(); // or SecureRandom
public static final int DEFAULT_LENGTH = 20 ;
public static final int MIN_TOKEN_LENGTH = 10;
public static String generateRandomString() {
return generateRandomString(DEFAULT_LENGTH);
}
public static String generateRandomString(int length) {
StringBuilder stringBuilder = new StringBuilder();
        // Appends random integer values until the requested length is reached; the result
        // may slightly exceed the requested length.
        while(stringBuilder.length()<length){
stringBuilder.append(Math.abs(random.nextInt()));
}
return stringBuilder.toString();
}
public static boolean isValidToken(String clientID) {
return (clientID!=null && clientID.length()>MIN_TOKEN_LENGTH);
}
}
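// A minimal usage sketch, not part of the original Apollo source; the class name below is
// hypothetical. It shows how a client token might be generated and then validated.
class ClientTokenGeneratorUsageSketch {
    public static void main(String[] args) {
        // Numeric token of at least DEFAULT_LENGTH characters.
        String token = ClientTokenGenerator.generateRandomString();
        System.out.println(token + " is valid: " + ClientTokenGenerator.isValidToken(token));
    }
}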
| 849 | 26.419355 | 73 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/FeatureStringEnum.java
|
package org.bbop.apollo.gwt.shared;
/**
* Created by ndunn on 4/2/15.
*/
public enum FeatureStringEnum {
ID,
FEATURES,
SUPPRESS_HISTORY,
SUPPRESS_EVENTS,
FEATURE_PROPERTY,
ANNOTATION_COUNT,
PARENT,
PARENT_ID,
PARENT_NAME,
USERNAME,
EDITOR,
TYPE,
PARENT_TYPE,
PROPERTIES,
TIMEACCESSION,
DEFAULT,
TIMELASTMODIFIED,
NOT,
NO_STATUS_ASSIGNED,
ANY_STATUS_ASSIGNED,
RESIDUES,
CHILDREN,
CDS("CDS"),
EXON("Exon"),
GENE("Gene"),
PSEUDOGENE("Pseudogene"),
STOP_CODON_READTHROUGH("stop_codon_read_through"),
STOP_CODON_READHTHROUGH_SUFFIX("-stop_codon_read_through"),
READTHROUGH_STOP_CODON,
TRANSCRIPT("Transcript"),
NONCANONICALFIVEPRIMESPLICESITE("NonCanonicalFivePrimeSpliceSite"),
NONCANONICALTHREEPRIMESPLICESITE("NonCanonicalThreePrimeSpliceSite"),
DATE_LAST_MODIFIED,
DATE_CREATION,
DATE,
CURRENT,
CURRENT_TAB,
COMMENT,
OLD_COMMENTS,
NEW_COMMENTS,
TAG_VALUE_DELIMITER("="),
COMMENTS,
CANNED_COMMENTS,
SUGGESTED_NAMES,
CANNED_KEYS,
CANNED_VALUES,
STATUS,
AVAILABLE_STATUSES,
NOTES,
TAG,
COMMON_DATA_DIRECTORY("common_data_directory"),
BAD_COMMON_PATH("badCommonPath"),
NON_RESERVED_PROPERTIES,
OLD_NON_RESERVED_PROPERTIES,
NEW_NON_RESERVED_PROPERTIES,
LOCATION,
COUNT,
CONFIRM,
FMIN,
FMAX,
OBSOLETE,
IS_FMIN_PARTIAL,
IS_FMAX_PARTIAL,
STRAND,
NAME,
GENE_NAME,
VALUE,
REMOTE_USER("REMOTE_USER"),
CV,
SEQUENCE,
SEQUENCE_DATA("sequenceData"),
SEARCH_DATABASE_DATA("searchDatabaseData"),
TRACK,
DB,
DBXREFS,
CLIENT_TOKEN("clientToken"),
IGNORE,
PREFERENCE,
ACCESSION,
CDS_SUFFIX("-CDS"),
MINUS1FRAMESHIFT("Minus1Frameshift"),
MINUS2FRAMESHIFT("Minus2Frameshift"),
PLUS1FRAMESHIFT("Plus1Frameshift"),
PLUS2FRAMESHIFT("Plus2Frameshift"),
DELETION_PREFIX("Deletion-"),
INSERTION_PREFIX("Insertion-"),
OWNER("owner"),
ORGANISM,
SYMBOL,
ALTERNATECVTERM("alternateCvTerm"),
DESCRIPTION,
ANNOTATION_INFO_EDITOR_CONFIGS,
HASDBXREFS("hasDbxrefs"),
HASATTRIBUTES("hasAttributes"),
HASPUBMEDIDS("hasPubmedIds"),
HASGOIDS("hasGoIds"),
GO_ANNOTATIONS,
HASCOMMENTS("hasComments"),
SUPPORTED_TYPES,
OLD_DBXREFS,
NEW_DBXREFS,
ATTRIBUTES,
PUBMEDIDS("pubmed_ids"),
GOIDS("go_ids"),
GENE_PRODUCT,
PROVENANCE,
DB_XREF,
SYNONYMS,
STRUCTURAL_EDIT,
HIGHLIGHTED_REGION("highlighted region"),
UNIQUENAME("uniquename"),
// TODO: move these to a SequenceTypeEnum
TYPE_PEPTIDE("peptide"),
TYPE_CDS("cds"),
TYPE_CDNA("cdna"),
TYPE_GENOMIC("genomic"),
TYPE_GPAD("GPAD"),
TYPE_GPAD2("GPAD2"),
TYPE_GPI2("GPI2"),
TYPE_GO("GO"),
TYPE_FASTA("FASTA"),
TYPE_GFF3("GFF3"),
TYPE_VCF("VCF"),
TYPE_CHADO("CHADO"),
TYPE_JBROWSE("JBROWSE"),
EXPORT_CHADO_CLEAN("chado_clean"),
EXPORT_CHADO_UPDATE("chado_update"),
EXPORT_ID("ID"),
EXPORT_DBXREF("Dbxref"),
EXPORT_NAME("Name"),
EXPORT_ALIAS("Alias"),
EXPORT_NOTE("Note"),
EXPORT_PARENT("Parent"),
ORGANISM_JBROWSE_DIRECTORY("organismJBrowseDirectory"),
ORGANISM_ID("organismId"),
ORGANISM_NAME("commonName"),
ORGANISM_DATA("organismData"),
SEQUENCE_NAME("sequenceName"),
DEFAULT_SEQUENCE_NAME("defaultSequenceName"),
PERMISSIONS,
ERROR,
ERROR_MESSAGE,
REQUEST_INDEX,
HAS_USERS,
USER_ID("userId"),
LOCKED,
HISTORY,
DOCK_OPEN("dockOpen"),
DOCK_WIDTH("dockWidth"),
USE_CDS,
USE_NAME,
TRACKS,
LABEL,
URL_TEMPLATE("urlTemplate"),
TRACK_DATA("trackData"),
TRACK_FILE("trackFile"),
TRACK_FILE_INDEX("trackFileIndex"),
TRACK_CONFIG("trackConfig"),
TRACK_LABEL("trackLabel"),
CREATOR("creator"),
START,
END,
SCORE,
NUMBER,
FILTER,
VALUES,
META,
BASES,
REFERENCE_ALLELE,
ALTERNATIVE_ALLELES,
GENOTYPES,
ALTERNATE_ALLELES,
OLD_ALTERNATE_ALLELES,
NEW_ALTERNATE_ALLELES,
ALLELE_FREQUENCY("allele_frequency"),
ALLELE_FREQUENCY_TAG("AF"),
ALLELE,
ALLELE_INFO,
NEW_ALLELE_INFO,
OLD_ALLELE_INFO,
VARIANT_INFO,
OLD_VARIANT_INFO,
NEW_VARIANT_INFO,
ORIG_ID,
;
private String value;
FeatureStringEnum(String value) {
this.value = value;
}
FeatureStringEnum() {
this.value = name().toLowerCase();
}
@Override
public String toString() {
return value;
}
public String getValue() {
return value;
}
public String getPretty() {
String pretty1 = value.replaceAll("_"," ");
return pretty1.substring(0,1).toUpperCase() + pretty1.substring(1);
}
}
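// A minimal usage sketch, not part of the original Apollo source; the class name below is
// hypothetical. It contrasts the raw value of a constant with its "pretty" display form.
class FeatureStringEnumUsageSketch {
    public static void main(String[] args) {
        System.out.println(FeatureStringEnum.GENE_PRODUCT.getValue());  // "gene_product"
        System.out.println(FeatureStringEnum.GENE_PRODUCT.getPretty()); // "Gene product"
        System.out.println(FeatureStringEnum.CDS.getValue());           // "CDS" (explicit value)
    }
}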
| 5,684 | 24.958904 | 83 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/GlobalPermissionEnum.java
|
package org.bbop.apollo.gwt.shared;
import java.util.ArrayList;
import java.util.List;
/**
* The "value" is mapping for Apollo 1
* Relateive ranks are for allowing bulk accesses. For that WRITE access will have additional permissions (including export).
* We will likely be adding additional permissions, as well.
* <p>
* Created by ndunn on 3/31/15.
*/
public enum GlobalPermissionEnum implements Comparable<GlobalPermissionEnum> {
USER("user",10),
INSTRUCTOR("instructor",50),
ADMIN("admin",100);
private String display; // pertains to the 1.0 value
private Integer rank;
GlobalPermissionEnum(String display , int rank) {
this.display = display;
this.rank = rank;
}
public String getLookupKey(){
return name().toLowerCase();
}
public String getDisplay() {
return display;
}
public static GlobalPermissionEnum getValueForString(String input) {
for (GlobalPermissionEnum permissionEnum : values()) {
if (permissionEnum.name().equals(input))
return permissionEnum;
}
return null;
}
public static List<GlobalPermissionEnum> getValueForArray(List<String> inputs) {
List<GlobalPermissionEnum> permissionEnumList = new ArrayList<>();
for (String input : inputs) {
permissionEnumList.add(getValueForString(input));
}
return permissionEnumList;
}
public Integer getRank() {
return rank;
}
// @Override
// public int compareTo(PermissionEnum o) {
// return value - o.getValue();
// }
}
| 1,625 | 24.40625 | 127 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/PermissionEnum.java
|
package org.bbop.apollo.gwt.shared;
import java.util.ArrayList;
import java.util.List;
/**
* The "value" is mapping for Apollo 1
* Relateive ranks are for allowing bulk accesses. For that WRITE access will have additional permissions (including export).
* We will likely be adding additional permissions, as well.
* <p>
* Created by ndunn on 3/31/15.
*/
public enum PermissionEnum implements Comparable<PermissionEnum> {
NONE(0, 0),
READ(1, 10),
EXPORT(7, 30),
WRITE(3, 50),
ADMINISTRATE(15, 70),
ALL_ORGANISM_ADMIN(100, 100);
private Integer value; // pertains to the 1.0 value
private Integer rank;
PermissionEnum(int oldValue, int rank) {
this.value = oldValue;
this.rank = rank;
}
public String getDisplay() {
return name().toLowerCase();
}
public static PermissionEnum getValueForString(String input) {
for (PermissionEnum permissionEnum : values()) {
if (permissionEnum.name().equals(input))
return permissionEnum;
}
return null;
}
public static PermissionEnum getValueForOldInteger(Integer input) {
for (PermissionEnum permissionEnum : values()) {
if (permissionEnum.value.equals(input))
return permissionEnum;
}
return null;
}
public static List<PermissionEnum> getValueForArray(List<String> inputs) {
List<PermissionEnum> permissionEnumList = new ArrayList<>();
for (String input : inputs) {
permissionEnumList.add(getValueForString(input));
}
return permissionEnumList;
}
public Integer getValue() {
return value;
}
public Integer getRank() {
return rank;
}
// @Override
// public int compareTo(PermissionEnum o) {
// return value - o.getValue();
// }
}
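// A minimal usage sketch, not part of the original Apollo source; the class name below is
// hypothetical. It shows how the relative ranks support bulk access checks (WRITE implies
// EXPORT) and how the Apollo 1 integer values map back to the enum.
class PermissionEnumUsageSketch {
    public static void main(String[] args) {
        PermissionEnum granted = PermissionEnum.WRITE;
        boolean canExport = granted.getRank() >= PermissionEnum.EXPORT.getRank();
        System.out.println(granted.getDisplay() + " implies export: " + canExport); // "write implies export: true"
        System.out.println(PermissionEnum.getValueForOldInteger(15));               // ADMINISTRATE
    }
}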
| 1,882 | 24.794521 | 127 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/geneProduct/GeneProduct.java
|
package org.bbop.apollo.gwt.shared.geneProduct;
import java.util.ArrayList;
import java.util.List;
public class GeneProduct {
private Long id;
    private String feature; // most likely the unique name of the gene this product refers to
private String productName ;
private Reference reference;
private List<String> noteList;
private boolean alternate = false;
private String evidenceCode;
private String evidenceCodeLabel;
private List<WithOrFrom> withOrFromList;
public String getEvidenceCode() {
return evidenceCode;
}
public void setEvidenceCode(String evidenceCode) {
this.evidenceCode = evidenceCode;
}
public String getFeature() {
return feature;
}
public void setFeature(String feature) {
this.feature = feature;
}
public boolean isAlternate() {
return alternate;
}
public void setAlternate(boolean alternate) {
this.alternate = alternate;
}
public Reference getReference() {
return reference;
}
public void setReference(Reference reference) {
this.reference = reference;
}
public List<WithOrFrom> getWithOrFromList() {
return withOrFromList;
}
public void setWithOrFromList(List<WithOrFrom> withOrFromList) {
this.withOrFromList = withOrFromList;
}
public void addWithOrFrom(WithOrFrom withOrFrom) {
if (withOrFromList == null) {
withOrFromList = new ArrayList<>();
}
withOrFromList.add(withOrFrom);
}
public String getWithOrFromString() {
StringBuilder withOrFromStringBuilder = new StringBuilder();
for (WithOrFrom withOrFrom : getWithOrFromList()) {
withOrFromStringBuilder.append(withOrFrom.getDisplay());
withOrFromStringBuilder.append(" ");
}
return withOrFromStringBuilder.toString();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getEvidenceCodeLabel() {
return evidenceCodeLabel;
}
public void setEvidenceCodeLabel(String evidenceCodeLabel) {
this.evidenceCodeLabel = evidenceCodeLabel;
}
public String getProductName() {
return productName;
}
public void setProductName(String productName) {
this.productName = productName;
}
public List<String> getNoteList() {
return noteList;
}
public void setNoteList(List<String> noteList) {
this.noteList = noteList;
}
}
| 2,367 | 20.527273 | 90 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/geneProduct/Reference.java
|
package org.bbop.apollo.gwt.shared.geneProduct;
public class Reference {
private String prefix;
private String lookupId;
public final static String NOT_PROVIDED = "NOT_PROVIDED";
public final static String UNKNOWN = "UNKNOWN";
public Reference(String display) {
assert display.contains(":");
this.prefix = display.split(":")[0];
this.lookupId = display.split(":")[1];
}
public Reference(String prefix,String id) {
this.prefix = prefix ;
this.lookupId = id ;
}
public static Reference createEmptyReference() {
return new Reference(NOT_PROVIDED,UNKNOWN);
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getReferenceString() {
return prefix + ":" + lookupId;
}
//
}
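// A minimal usage sketch, not part of the original Apollo source; the class name and the
// sample identifier are hypothetical. It shows both ways of building a "PREFIX:ID" reference.
class ReferenceUsageSketch {
    public static void main(String[] args) {
        Reference fromDisplay = new Reference("PMID:12345");
        Reference fromParts = new Reference("PMID", "12345");
        System.out.println(fromDisplay.getReferenceString()); // "PMID:12345"
        System.out.println(fromParts.getReferenceString());   // "PMID:12345"
        System.out.println(Reference.createEmptyReference().getReferenceString()); // "NOT_PROVIDED:UNKNOWN"
    }
}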
| 1,039 | 20.666667 | 61 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/geneProduct/WithOrFrom.java
|
package org.bbop.apollo.gwt.shared.geneProduct;
public class WithOrFrom {
private String prefix;
private String lookupId;
public WithOrFrom(String prefix, String lookup) {
this.prefix = prefix;
this.lookupId = lookup;
}
public WithOrFrom(String lookup) {
this.prefix = lookup.split(":")[0];
this.lookupId = lookup.split(":")[1];
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getDisplay() {
return prefix + ":" + lookupId;
}
}
| 769 | 19.263158 | 53 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/Aspect.java
|
package org.bbop.apollo.gwt.shared.go;
public enum Aspect {
BP("biological process"),
MF("molecular function"),
CC("cellular component"),
;
private String lookup;
private Aspect(String lookupValue){
this.lookup = lookupValue ;
}
public String getLookup() {
return lookup;
}
}
| 333 | 15.7 | 39 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/GoAnnotation.java
|
package org.bbop.apollo.gwt.shared.go;
import java.util.ArrayList;
import java.util.List;
public class GoAnnotation {
private Long id;
private Aspect aspect;
    private String gene; // most likely the unique name of the gene this annotation refers to
private String goTerm;
private String goTermLabel;
private String geneRelationship;
private String evidenceCode;
private String evidenceCodeLabel;
private boolean negate = false;
private List<WithOrFrom> withOrFromList;
private List<String> noteList;
private Reference reference;
public Aspect getAspect() {
return aspect;
}
public void setAspect(Aspect aspect) {
this.aspect = aspect;
}
public String getEvidenceCode() {
return evidenceCode;
}
public void setEvidenceCode(String evidenceCode) {
this.evidenceCode = evidenceCode;
}
public String getGene() {
return gene;
}
public void setGene(String gene) {
this.gene = gene;
}
public String getGoTerm() {
return goTerm;
}
public void setGoTerm(String goTerm) {
this.goTerm = goTerm;
}
public String getGeneRelationship() {
return geneRelationship;
}
public void setGeneRelationship(String geneRelationship) {
this.geneRelationship = geneRelationship;
}
public boolean isNegate() {
return negate;
}
public void setNegate(boolean negate) {
this.negate = negate;
}
public List<String> getNoteList() {
return noteList;
}
public void setNoteList(List<String> noteList) {
this.noteList = noteList;
}
public Reference getReference() {
return reference;
}
public void setReference(Reference reference) {
this.reference = reference;
}
public List<WithOrFrom> getWithOrFromList() {
return withOrFromList;
}
public void setWithOrFromList(List<WithOrFrom> withOrFromList) {
this.withOrFromList = withOrFromList;
}
public void addWithOrFrom(WithOrFrom withOrFrom) {
if (withOrFromList == null) {
withOrFromList = new ArrayList<>();
}
withOrFromList.add(withOrFrom);
}
public void addNote(String note) {
if (noteList == null) {
noteList = new ArrayList<>();
}
noteList.add(note);
}
public String getWithOrFromString() {
StringBuilder withOrFromStringBuilder = new StringBuilder();
for (WithOrFrom withOrFrom : getWithOrFromList()) {
withOrFromStringBuilder.append(withOrFrom.getDisplay());
withOrFromStringBuilder.append(" ");
}
return withOrFromStringBuilder.toString();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getGoTermLabel() {
return goTermLabel;
}
public void setGoTermLabel(String goTermLabel) {
this.goTermLabel = goTermLabel;
}
public String getEvidenceCodeLabel() {
return evidenceCodeLabel;
}
public void setEvidenceCodeLabel(String evidenceCodeLabel) {
this.evidenceCodeLabel = evidenceCodeLabel;
}
}
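// A minimal usage sketch, not part of the original Apollo source; the class name and the
// sample identifiers are hypothetical. It shows how with/from entries accumulate and how
// getWithOrFromString() renders them.
class GoAnnotationUsageSketch {
    public static void main(String[] args) {
        GoAnnotation annotation = new GoAnnotation();
        annotation.setGoTerm("GO:0008150");
        annotation.addWithOrFrom(new WithOrFrom("UniProtKB:P12345"));
        annotation.addWithOrFrom(new WithOrFrom("InterPro", "IPR000001"));
        System.out.println(annotation.getWithOrFromString()); // "UniProtKB:P12345 InterPro:IPR000001 " (trailing space)
    }
}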
| 2,972 | 19.93662 | 86 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/GoGene.java
|
package org.bbop.apollo.gwt.shared.go;
/**
* Placeholder for a feature
*/
public class GoGene {
// points to annotation features
private Long id;
private String name ;
private String uniqueName ;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getUniqueName() {
return uniqueName;
}
public void setUniqueName(String uniqueName) {
this.uniqueName = uniqueName;
}
}
| 628 | 16 | 50 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/GoTerm.java
|
package org.bbop.apollo.gwt.shared.go;
/**
 * Placeholder for a feature with a single annotation
*/
public class GoTerm {
// Long id;
private String name;
private String prefix;
private String lookupId;
public GoTerm(String lookup) {
if(lookup.contains(":")){
this.prefix = lookup.split(":")[0];
this.lookupId = lookup.split(":")[1];
}
else{
this.name = lookup ;
}
}
public GoTerm(String lookup,String name ) {
this.prefix = lookup.split(":")[0];
this.lookupId = lookup.split(":")[1];
this.name = name ;
}
// GoGene goGene ;
// public Long getId() {
// return id;
// }
//
// public void setId(Long id) {
// this.id = id;
// }
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
// public GoGene getGoGene() {
// return goGene;
// }
//
// public void setGoGene(GoGene goGene) {
// this.goGene = goGene;
// }
public String getLinkDisplay() {
return prefix + ":" + lookupId;
}
}
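// A minimal usage sketch, not part of the original Apollo source; the class name and the
// sample term are hypothetical. It shows the two parsing paths of the constructors: a
// "PREFIX:ID" lookup versus a bare name without a colon.
class GoTermUsageSketch {
    public static void main(String[] args) {
        GoTerm byId = new GoTerm("GO:0003674", "molecular_function");
        System.out.println(byId.getLinkDisplay()); // "GO:0003674"
        System.out.println(byId.getName());        // "molecular_function"
        GoTerm byName = new GoTerm("molecular_function"); // no colon, so only the name is set
        System.out.println(byName.getName());      // "molecular_function"
    }
}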
| 1,458 | 18.453333 | 53 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/Qualifier.java
|
package org.bbop.apollo.gwt.shared.go;
public enum Qualifier {
NOT,
CONTRIBUTES_TO
}
| 95 | 11 | 38 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/Reference.java
|
package org.bbop.apollo.gwt.shared.go;
public class Reference {
private String prefix;
private String lookupId;
public final static String NOT_PROVIDED = "NOT_PROVIDED";
public final static String UNKNOWN = "UNKNOWN";
public Reference(String display) {
assert display.contains(":");
this.prefix = display.split(":")[0];
this.lookupId = display.split(":")[1];
}
public Reference(String prefix,String id) {
this.prefix = prefix ;
this.lookupId = id ;
}
public static Reference createEmptyReference() {
return new Reference(NOT_PROVIDED,UNKNOWN);
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getReferenceString() {
return prefix + ":" + lookupId;
}
//
}
| 1,020 | 20.270833 | 61 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/go/WithOrFrom.java
|
package org.bbop.apollo.gwt.shared.go;
public class WithOrFrom {
private String prefix;
private String lookupId;
public WithOrFrom(String prefix, String lookup) {
this.prefix = prefix;
this.lookupId = lookup;
}
public WithOrFrom(String lookup) {
this.prefix = lookup.split(":")[0];
this.lookupId = lookup.split(":")[1];
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getDisplay() {
return prefix + ":" + lookupId;
}
}
| 760 | 19.026316 | 53 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/provenance/Provenance.java
|
package org.bbop.apollo.gwt.shared.provenance;
import java.util.ArrayList;
import java.util.List;
public class Provenance {
private Long id;
    private String feature; // most likely the unique name of the gene this provenance record refers to
private String field;
private Reference reference;
private List<String> noteList;
private String evidenceCode;
private String evidenceCodeLabel;
private List<WithOrFrom> withOrFromList;
public String getEvidenceCode() {
return evidenceCode;
}
public void setEvidenceCode(String evidenceCode) {
this.evidenceCode = evidenceCode;
}
public String getFeature() {
return feature;
}
public void setFeature(String feature) {
this.feature = feature;
}
public Reference getReference() {
return reference;
}
public void setReference(Reference reference) {
this.reference = reference;
}
public List<WithOrFrom> getWithOrFromList() {
return withOrFromList;
}
public void setWithOrFromList(List<WithOrFrom> withOrFromList) {
this.withOrFromList = withOrFromList;
}
public void addWithOrFrom(WithOrFrom withOrFrom) {
if (withOrFromList == null) {
withOrFromList = new ArrayList<>();
}
withOrFromList.add(withOrFrom);
}
public String getWithOrFromString() {
StringBuilder withOrFromStringBuilder = new StringBuilder();
for (WithOrFrom withOrFrom : getWithOrFromList()) {
withOrFromStringBuilder.append(withOrFrom.getDisplay());
withOrFromStringBuilder.append(" ");
}
return withOrFromStringBuilder.toString();
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getEvidenceCodeLabel() {
return evidenceCodeLabel;
}
public void setEvidenceCodeLabel(String evidenceCodeLabel) {
this.evidenceCodeLabel = evidenceCodeLabel;
}
public String getField() {
return field;
}
public void setField(String field) {
this.field = field;
}
public List<String> getNoteList() {
return noteList;
}
public void setNoteList(List<String> noteList) {
this.noteList = noteList;
}
}
| 2,140 | 20.19802 | 90 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/provenance/ProvenanceField.java
|
package org.bbop.apollo.gwt.shared.provenance;
public enum ProvenanceField {
TYPE,
SYMBOL,
NAME,
SYNONYM,
DESCRIPTION,
DB_XREF,
ATTRIBUTE,
COMMENT,
}
| 183 | 13.153846 | 46 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/provenance/Reference.java
|
package org.bbop.apollo.gwt.shared.provenance;
public class Reference {
private String prefix;
private String lookupId;
public final static String NOT_PROVIDED = "NOT_PROVIDED";
public final static String UNKNOWN = "UNKNOWN";
public Reference(String display) {
assert display.contains(":");
this.prefix = display.split(":")[0];
this.lookupId = display.split(":")[1];
}
public Reference(String prefix,String id) {
this.prefix = prefix ;
this.lookupId = id ;
}
public static Reference createEmptyReference() {
return new Reference(NOT_PROVIDED,UNKNOWN);
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getReferenceString() {
return prefix + ":" + lookupId;
}
//
}
| 1,038 | 20.645833 | 61 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/provenance/WithOrFrom.java
|
package org.bbop.apollo.gwt.shared.provenance;
public class WithOrFrom {
private String prefix;
private String lookupId;
public WithOrFrom(String prefix, String lookup) {
this.prefix = prefix;
this.lookupId = lookup;
}
public WithOrFrom(String lookup) {
this.prefix = lookup.split(":")[0];
this.lookupId = lookup.split(":")[1];
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getLookupId() {
return lookupId;
}
public void setLookupId(String lookupId) {
this.lookupId = lookupId;
}
public String getDisplay() {
return prefix + ":" + lookupId;
}
}
| 768 | 19.236842 | 53 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/sequence/SearchHit.java
|
package org.bbop.apollo.gwt.shared.sequence;
public class SearchHit {
String id;
Long start;
Long end;
Integer strand;
Double score;
Double significance;
Double identity;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public Long getStart() {
return start;
}
public void setStart(Long start) {
this.start = start;
}
public Long getEnd() {
return end;
}
public void setEnd(Long end) {
this.end = end;
}
public Double getScore() {
return score;
}
public void setScore(Double score) {
this.score = score;
}
public Double getSignificance() {
return significance;
}
public void setSignificance(Double significance) {
this.significance = significance;
}
public Double getIdentity() {
return identity;
}
public void setIdentity(Double identity) {
this.identity = identity;
}
public String getLocation(){
return getId()+":"+getStart()+".."+getEnd();
}
public Integer getStrand() {
return strand;
}
public void setStrand(Integer strand) {
this.strand = strand;
}
}
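// A minimal usage sketch, not part of the original Apollo source; the class name and the
// coordinates are hypothetical. It shows the "id:start..end" form returned by getLocation().
class SearchHitUsageSketch {
    public static void main(String[] args) {
        SearchHit hit = new SearchHit();
        hit.setId("Group1.1");
        hit.setStart(1000L);
        hit.setEnd(2000L);
        hit.setStrand(1);
        System.out.println(hit.getLocation()); // "Group1.1:1000..2000"
    }
}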
| 1,143 | 14.888889 | 52 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/sequence/SearchResults.java
|
package org.bbop.apollo.gwt.shared.sequence;
import java.util.List;
public class SearchResults {
List<SearchHit> searchHitList;
public List<SearchHit> getSearchHitList() {
return searchHitList;
}
public void setSearchHitList(List<SearchHit> searchHitList) {
this.searchHitList = searchHitList;
}
}
| 321 | 17.941176 | 63 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/track/NclistColumnEnum.java
|
package org.bbop.apollo.gwt.shared.track;
/**
* Created by nathandunn on 12/3/15.
*/
public enum NclistColumnEnum {
START,
END,
STRAND,
SCORE,
TYPE,
SUBFEATURES,
SUBLIST,
SEQ_ID,
ID,
SOURCE,
NAME,
ALIAS,
CHUNK,
PHASE;
private String value;
NclistColumnEnum(String value) {
this.value = value;
}
NclistColumnEnum() {
this.value = name().substring(0, 1).toUpperCase() + name().substring(1).toLowerCase();
}
public String getValue() {
return value;
}
}
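// A minimal usage sketch, not part of the original Apollo source; the class name below is
// hypothetical. It shows the default value derived from each constant name (first letter
// upper-cased, the rest lower-cased).
class NclistColumnEnumUsageSketch {
    public static void main(String[] args) {
        System.out.println(NclistColumnEnum.START.getValue());       // "Start"
        System.out.println(NclistColumnEnum.SUBFEATURES.getValue()); // "Subfeatures"
        System.out.println(NclistColumnEnum.SEQ_ID.getValue());      // "Seq_id"
    }
}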
| 566 | 14.324324 | 94 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/track/SequenceTypeEnum.java
|
package org.bbop.apollo.gwt.shared.track;
public enum SequenceTypeEnum {
FA("fa"),
FASTA("fasta"),
FNA("fna"),
FA_GZ("fa.gz","gz"),
FASTA_GZ("fasta.gz","gz"),
FNA_GZ("fna.gz","gz"),
FA_ZIP("fa.zip","zip"),
FASTA_ZIP("fasta.zip","zip"),
FNA_ZIP("fna.zip","zip"),
FA_TGZ("fa.tgz","tar.gz"),
FASTA_TGZ("fasta.tgz","tar.gz"),
FNA_TGZ("fna.tgz","tar.gz"),
FA_TAR_GZ("fa.tar.gz","tar.gz"),
FASTA_TAR_GZ("fasta.tar.gz","tar.gz"),
FNA_TAR_GZ("fna.tar.gz","tar.gz");
private final String suffix ;
private final String compression;
SequenceTypeEnum(String suffix){
this(suffix,null);
}
SequenceTypeEnum(String suffix, String compression){
this.suffix = suffix;
this.compression = compression ;
}
public String getSuffix() {
return suffix;
}
public String getCompression() {
return compression;
}
    // Normalises any recognised suffix to the canonical "fa" form, preserving the compression extension.
    public String getCorrectedSuffix(){
return FA.suffix + (compression!=null ? "." + compression : "") ;
}
public static SequenceTypeEnum getSequenceTypeForFile(String fileName){
for(SequenceTypeEnum sequenceTypeEnum : values()){
if(fileName.endsWith(sequenceTypeEnum.suffix)){
return sequenceTypeEnum ;
}
}
return null ;
}
public static String generateSuffixDescription(){
String returnString = "";
for(SequenceTypeEnum s: values()){
returnString += s.suffix + " " ;
}
returnString = returnString.trim();
returnString = returnString.replaceAll(" ",", ");
return returnString;
}
@Override
public String toString() {
return "SequenceTypeEnum{" +
"suffix='" + suffix + '\'' +
", compression='" + compression + '\'' +
'}';
}
}
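// A minimal usage sketch, not part of the original Apollo source; the class name and the
// file name are hypothetical. It shows suffix detection and the normalised "fa" suffix.
class SequenceTypeEnumUsageSketch {
    public static void main(String[] args) {
        SequenceTypeEnum type = SequenceTypeEnum.getSequenceTypeForFile("genome.fasta.gz");
        System.out.println(type);                      // SequenceTypeEnum{suffix='fasta.gz', compression='gz'}
        System.out.println(type.getCorrectedSuffix()); // "fa.gz"
        System.out.println(SequenceTypeEnum.generateSuffixDescription()); // comma-separated list of all suffixes
    }
}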
| 1,884 | 24.821918 | 75 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/track/TrackIndex.java
|
package org.bbop.apollo.gwt.shared.track;
/**
* Created by nathandunn on 12/2/15.
*/
public class TrackIndex {
// index locations . . .
private Integer start;
private Integer end;
private Integer source;
private Integer strand;
private Integer phase;
private Integer type;
private Integer score;
private Integer chunk;
private Integer id;
private Integer subFeaturesColumn;
private Integer name ;
private Integer alias;
private Integer seqId;
private Integer classIndex;
    private Integer sublistColumn; // unclear whether this has its own column; it may simply be the last column, or implied by "chunk"
// set from intake
private String trackName;
private String organism;
    // Normalises unset column indexes: a zero index means the column is absent and is replaced
    // with null. Assumes every index field has already been populated.
    public void fixCoordinates() {
start = start == 0 ? null : start;
end = end == 0 ? null : end;
source = source == 0 ? null : source;
strand = strand == 0 ? null : strand;
phase = phase == 0 ? null : phase;
type = type == 0 ? null : type;
score = score == 0 ? null : score;
chunk = chunk == 0 ? null : chunk;
id = id == 0 ? null : id;
subFeaturesColumn = subFeaturesColumn == 0 ? null : subFeaturesColumn;
sublistColumn = sublistColumn == 0 ? null : sublistColumn;
}
public Boolean hasChunk() {
return chunk != null && chunk > 0;
}
public Boolean hasSubFeatures() {
return subFeaturesColumn != null && subFeaturesColumn > 0;
}
public Boolean hasSubList() {
return sublistColumn != null && sublistColumn > 0;
}
public Integer getStart() {
return start;
}
public void setStart(Integer start) {
this.start = start;
}
public Integer getEnd() {
return end;
}
public void setEnd(Integer end) {
this.end = end;
}
public Integer getSource() {
return source;
}
public void setSource(Integer source) {
this.source = source;
}
public Integer getStrand() {
return strand;
}
public void setStrand(Integer strand) {
this.strand = strand;
}
public Integer getPhase() {
return phase;
}
public void setPhase(Integer phase) {
this.phase = phase;
}
public Integer getType() {
return type;
}
public void setType(Integer type) {
this.type = type;
}
public Integer getScore() {
return score;
}
public void setScore(Integer score) {
this.score = score;
}
public Integer getChunk() {
return chunk;
}
public void setChunk(Integer chunk) {
this.chunk = chunk;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Integer getSubFeaturesColumn() {
return subFeaturesColumn;
}
public void setSubFeaturesColumn(Integer subFeaturesColumn) {
this.subFeaturesColumn = subFeaturesColumn;
}
public Integer getSublistColumn() {
return sublistColumn;
}
public void setSublistColumn(Integer sublistColumn) {
this.sublistColumn = sublistColumn;
}
public String getTrackName() {
return trackName;
}
public void setTrackName(String trackName) {
this.trackName = trackName;
}
public String getOrganism() {
return organism;
}
public void setOrganism(String organism) {
this.organism = organism;
}
public Integer getSeqId() {
return seqId;
}
public void setSeqId(Integer seqId) {
this.seqId = seqId;
}
public Integer getClassIndex() {
return classIndex;
}
public void setClassIndex(Integer classIndex) {
this.classIndex = classIndex;
}
public Integer getName() {
return name;
}
public void setName(Integer name) {
this.name = name;
}
public Integer getAlias() {
return alias;
}
public void setAlias(Integer alias) {
this.alias = alias;
}
}
| 4,117 | 20.010204 | 130 |
java
|
Apollo
|
Apollo-master/src/gwt/org/bbop/apollo/gwt/shared/track/TrackTypeEnum.java
|
package org.bbop.apollo.gwt.shared.track;
public enum TrackTypeEnum {
BAM("bam","bam.bai"),
BAM_CANVAS("bam","bam.bai"),
BIGWIG_HEAT_MAP("bw"),
BIGWIG_XY("bw"),
VCF("vcf.gz","vcf.gz.tbi"),
VCF_CANVAS("vcf.gz","vcf.gz.tbi"),
// GFF3_JSON(new String[]{"gff","gff3","gff.gz","gff3.gz"}),
// GFF3_JSON_CANVAS(new String[]{"gff","gff3","gff.gz","gff3.gz"}),
GFF3_JSON(new String[]{"gff","gff3"}),
GFF3_JSON_CANVAS(new String[]{"gff","gff3"}),
GFF3(new String[]{"gff","gff3","gff.gz","gff3.gz"}),
GFF3_CANVAS(new String[]{"gff","gff3","gff.gz","gff3.gz"}),
GFF3_TABIX(new String[]{"gff.gz","gff3.gz"},new String[]{"gff.gz.tbi","gff3.gz.tbi"}),
GFF3_TABIX_CANVAS(new String[]{"gff.gz","gff3.gz"},new String[]{"gff.gz.tbi","gff3.gz.tbi"});
private String[] suffix ;
private String[] suffixIndex ;
TrackTypeEnum(String suffix){
this(new String[]{suffix},null);
}
TrackTypeEnum(String suffix,String suffixIndex){
this(new String[]{suffix},new String[]{suffixIndex});
}
TrackTypeEnum(String[] suffix){
this.suffix = suffix;
this.suffixIndex = null ;
}
TrackTypeEnum(String[] suffix,String[] suffixIndex){
this.suffix = suffix;
this.suffixIndex = suffixIndex;
}
public boolean isIndexed() {
return this.suffixIndex!=null ;
}
public String[] getSuffix() {
return suffix;
}
public String[] getSuffixIndex() {
return suffixIndex;
}
@Override
public String toString() {
return name().replaceAll("_"," ");
}
public boolean hasSuffix(String input){
for(String s : suffix){
if(input.endsWith(s)) return true ;
}
return false ;
}
    // Callers should check isIndexed() first; suffixIndex is null for track types without an index.
    public boolean hasSuffixIndex(String input){
for(String s : suffixIndex){
if(input.endsWith(s)) return true ;
}
return false ;
}
public String getSuffixString() {
String returnString = "";
for(String s : suffix){
returnString += "*."+s+" ";
}
return returnString;
}
public String getSuffixIndexString() {
String returnString = "";
for(String s : suffixIndex){
returnString += "*."+s+" ";
}
return returnString;
}
}
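// A minimal usage sketch, not part of the original Apollo source; the class name and the
// file name are hypothetical. It shows suffix matching and the index-requirement check.
class TrackTypeEnumUsageSketch {
    public static void main(String[] args) {
        TrackTypeEnum type = TrackTypeEnum.BAM;
        System.out.println(type.hasSuffix("alignments.bam")); // true
        System.out.println(type.isIndexed());                 // true, so a *.bam.bai index is expected
        System.out.println(type.getSuffixIndexString());      // "*.bam.bai "
        System.out.println(TrackTypeEnum.GFF3_JSON.isIndexed()); // false
    }
}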
| 2,344 | 25.055556 | 97 |
java
|
Apollo
|
Apollo-master/src/java/org/bbop/apollo/ImprovedH2Dialect.java
|
package org.bbop.apollo;
import org.hibernate.dialect.H2Dialect;
public class ImprovedH2Dialect extends H2Dialect {
@Override
public String getDropSequenceString(String sequenceName) {
// Adding the "if exists" clause to avoid warnings
return "drop sequence if exists " + sequenceName;
}
@Override
public boolean dropConstraints() {
// We don't need to drop constraints before dropping tables, that just
// leads to error messages about missing tables when we don't have a
// schema in the database
return false;
}
}
| 592 | 28.65 | 78 |
java
|
Apollo
|
Apollo-master/src/java/org/bbop/apollo/ImprovedPostgresDialect.java
|
package org.bbop.apollo;
import org.hibernate.dialect.PostgresPlusDialect;
public class ImprovedPostgresDialect extends PostgresPlusDialect {
@Override
public String getDropSequenceString(String sequenceName) {
// Adding the "if exists" clause to avoid warnings
return "drop sequence if exists " + sequenceName;
}
@Override
public boolean dropConstraints() {
// We don't need to drop constraints before dropping tables, that just
// leads to error messages about missing tables when we don't have a
// schema in the database
return false;
}
}
| 618 | 29.95 | 78 |
java
|
Apollo
|
Apollo-master/src/java/org/bbop/apollo/Pair.java
|
package org.bbop.apollo;
public class Pair<T, U> {
private T first;
private U second;
public Pair(T first, U second) {
this.first = first;
this.second = second;
}
public Pair(Pair<T, U> pair) {
this.first = pair.first;
this.second = pair.second;
}
public T getFirst() {
return first;
}
public U getSecond() {
return second;
}
public String toString() {
return "[" + first.toString() + ", " + second.toString() + "]";
}
}
| 552 | 17.433333 | 71 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/classifiers/DummyClassifier.java
|
/*
* Copyright 2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package classifiers;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
public class DummyClassifier {
public DummyClassifier()
{
}
public void parseStreamAndClassify(String jsonFile,String resultsFile) throws IOException {
String journalName;
int count = 0;
int abstract_count=0;
try {
JsonReader reader = new JsonReader(new InputStreamReader(new FileInputStream(jsonFile)));
JsonWriter writer = new JsonWriter(new OutputStreamWriter(new FileOutputStream(resultsFile), "UTF-8"));
writer.setIndent(" ");
//reader.setLenient(true);
reader.beginArray();
writer.beginArray();
while (reader.hasNext()) {
reader.beginObject();
writer.beginObject();
while (reader.hasNext()) {
String name = reader.nextName();
if (name.equals("abstract")) {
abstract_count++;
reader.skipValue();
} else if (name.equals("pmid")) {
String pmid = reader.nextString();
writer.name("labels");
writeLabels(writer);
writer.name("pmid").value(pmid);
} else if (name.equals("title")){
reader.skipValue();
}
else{
System.out.println(name);
reader.skipValue();
}
}
reader.endObject();
writer.endObject();
}
reader.endArray();
writer.endArray();
System.out.println("Abstracts: "+abstract_count);
writer.close();
        } catch (FileNotFoundException ex) {
            // The input file could not be opened; the exception is swallowed and nothing is written.
        }
}
public void writeLabels(JsonWriter writer) throws IOException {
writer.beginArray();
for(int i=0;i<15;i++){
writer.value("D005124");
}
writer.endArray();
}
public static void main(String args[])
{
DummyClassifier dc = new DummyClassifier();
try {
dc.parseStreamAndClassify(args[0], args[1]);
} catch (IOException ex) {
}
}
}
| 3,564 | 29.470085 | 115 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/converters/MapMeshResults.java
|
/*
* Copyright 2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package converters;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
/** This script is called to prepare files for BioASQ Task A evaluation, for flat measures.
*
* Example call
* java -cp BioASQEvaluation2018.jar converters.MapMeshResults "...\mesh_year_INT.txt" "...\labels_to_map.txt" "...\labels_mapped.txt"
* *labels_mapped.txt is the output file name
*
* @author tasosnent
*/
public class MapMeshResults {
Map mapping;
public MapMeshResults()
{
mapping = new TreeMap();
}
public void loadMapping(String mapfile)
{
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(mapfile));
String line;
while((line=br.readLine())!=null){
String nodes[] = line.split("\\s+");
mapping.put(nodes[0],Integer.parseInt(nodes[1]));
}
br.close();
}catch(IOException ex){
}
}
public void mapMeshResults(String source,String target)
{
BufferedReader br = null;
BufferedWriter bw = null;
try {
br = new BufferedReader(new FileReader(source));
bw = new BufferedWriter(new FileWriter(target));
String line;
while((line=br.readLine())!=null){
String labels[] = line.split("\\s+");
for(int i=0;i<labels.length;i++){
Integer lab = (Integer)mapping.get(labels[i]);
if(lab!=null)
bw.write(lab.intValue()+" ");
}
bw.write("\n");
}
br.close();
bw.close();
}catch(IOException ex){
}
}
public static void main(String[] args){
MapMeshResults mapres = new MapMeshResults();
mapres.loadMapping(args[0]);
mapres.mapMeshResults(args[1], args[2]);
}
}
| 3,054 | 29.247525 | 140 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/converters/OBOToHier.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package converters;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
public class OBOToHier {
Map parent_child;
Map parent_child_int;
Map mapping;
Map id_name_map;
public OBOToHier()
{
parent_child = new TreeMap();
parent_child_int = new TreeMap();
mapping = new TreeMap();
id_name_map = new TreeMap();
}
public void convertOboToHier(String source)
{
int row=0;
try {
BufferedReader bf = new BufferedReader(new FileReader(source));
String line="";
int class_ids=1;
while((line=bf.readLine())!=null)
{
row++;
if(line.startsWith("[Term]"))
{
line = bf.readLine();
row++;
String ids[] = line.split("\\s+");
String id = ids[1];
if(mapping.get(id)==null)
{
mapping.put(id, new Integer(class_ids));
class_ids++;
}
line = bf.readLine(); // here we get the name
row++;
String name = line.substring(line.indexOf(' ')+1);
id_name_map.put(name,id);
while(!line.startsWith("is_a"))
{
line = bf.readLine(); row++;
}
ArrayList<String> children = new ArrayList<String>();
ArrayList<Integer> children_int = new ArrayList<Integer>();
while(line!=null&&line.startsWith("is_a")){
String tokens[] = line.split("\\s+");
String parent = tokens[1];
if(mapping.get(parent)==null)
{
mapping.put(parent, new Integer(class_ids));
class_ids++;
}
ArrayList<String>ch = (ArrayList<String>)parent_child.get(parent);
ArrayList<Integer>ch_int = (ArrayList<Integer>)parent_child_int.get((Integer)mapping.get(parent));
if(ch==null)
{
children.add(id);
parent_child.put(parent,children);
children_int.add((Integer)mapping.get(id));
parent_child_int.put((Integer)mapping.get(parent),children_int);
}
else
{
ch.add(id);
ch_int.add((Integer)mapping.get(id));
}
line = bf.readLine();
row++;
}
}
}
} catch (IOException ex) {
}catch(NullPointerException npe){System.out.println(row);}
}
public void exportToFile(String hier)
{
try {
BufferedWriter bw = new BufferedWriter(new FileWriter(hier));
Iterator iter = parent_child.keySet().iterator();
while(iter.hasNext())
{
String parent = (String)iter.next();
ArrayList<String> children = (ArrayList<String>)parent_child.get(parent);
for(int k=0;k<children.size();k++)
{
bw.write(parent+" "+children.get(k)+"\n");
}
}
bw.close();
} catch (IOException ex) {
}
}
public void exportMapping(String mapp)
{
try {
BufferedWriter bw = new BufferedWriter(new FileWriter(mapp));
Iterator iter =mapping.keySet().iterator();
while(iter.hasNext())
{
String parent = (String)iter.next();
Integer int_id = (Integer)mapping.get(parent);
bw.write(parent+" "+int_id.intValue()+"\n");
}
bw.close();
} catch (IOException ex) {
}
}
public void exportNameIdMapping(String mapp)
{
try {
BufferedWriter bw = new BufferedWriter(new FileWriter(mapp));
Iterator iter =id_name_map.keySet().iterator();
while(iter.hasNext())
{
String name = (String)iter.next();
String id = (String)id_name_map.get(name);
bw.write(name+"="+id+"\n");
}
bw.close();
} catch (IOException ex) {
}
}
public void exportToFileInt(String hier)
{
try {
BufferedWriter bw = new BufferedWriter(new FileWriter(hier));
Iterator iter = parent_child_int.keySet().iterator();
while(iter.hasNext())
{
Integer parent = (Integer)iter.next();
ArrayList<Integer> children = (ArrayList<Integer>)parent_child_int.get(parent);
for(int k=0;k<children.size();k++)
{
bw.write(parent.intValue()+" "+children.get(k).intValue()+"\n");
}
}
bw.close();
} catch (IOException ex) {
}
}
public static void main(String args[])
{
OBOToHier bob2hier = new OBOToHier();
bob2hier.convertOboToHier(args[0]);
bob2hier.exportToFile(args[1]);
bob2hier.exportToFileInt(args[2]);
bob2hier.exportMapping(args[3]);
bob2hier.exportNameIdMapping(args[4]);
}
}
| 7,254 | 30.681223 | 125 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/core/Graph.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package core;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
public class Graph {
Map parent_child; //parent child relations, for each parent we keep an ArrayList of children
Map child_parent;
Set graph_parents;
Set graph_children;
public Graph()
{
parent_child = new TreeMap();
child_parent = new TreeMap();
graph_children = new TreeSet<Integer>();
graph_parents = new TreeSet<Integer>();
}
public void loadGraphFromFile(String filename){
BufferedReader br = null;
int row=0;
int fathers=0;
try {
br = new BufferedReader(new FileReader(filename));
String line;
while((line=br.readLine())!=null){
row++;
String nodes[] = line.split("\\s+");
if(nodes.length!=2)
{
System.out.println("Error with parsing. Please check file.\n"
+ "Line: "+row);
System.exit(0);
}
int father = Integer.parseInt(nodes[0]);
int child = Integer.parseInt(nodes[1]);
graph_children.add(new Integer(child));
graph_parents.add(new Integer(father));
if(!parent_child.containsKey(father)){
ArrayList<Integer> children= new ArrayList<Integer>();
children.add(child);
parent_child.put(new Integer(father), children);
fathers++;
}
else{
ArrayList<Integer> get = (ArrayList<Integer>)parent_child.get(father);
get.add(child);
}
if(!child_parent.containsKey(child)){
ArrayList<Integer> myparents = new ArrayList<Integer>();
myparents.add(new Integer(father));
child_parent.put(new Integer(child), myparents);
}
else
{
ArrayList<Integer> myparents = (ArrayList<Integer>)child_parent.get(child);
myparents.add(new Integer(father));
}
}
} catch (IOException ex) {
System.out.println("File not found: "+filename + " or unable to read file");
System.out.println(ex.getMessage());
}finally{
try{
if (br!=null){
br.close();
}
}catch(IOException ex){
System.out.println(ex);
}
}
System.out.println("Loaded fathers: "+fathers);
//System.out.println("Total num of classes: "+num_of_nodes);
}
    public boolean isLeaf(Integer node)
    {
        // A node is a leaf only if it never appears as a parent in the parent_child map,
        // i.e. it has no children.
        return !parent_child.containsKey(node);
    }
public void printGraphStats()
{
System.out.println("Graph parents: "+ graph_parents.size());
System.out.println("Graph childern: "+ graph_children.size());
graph_children.removeAll(graph_parents);
System.out.println("Leaves: "+ graph_children.size());
//graph_parents.removeAll(graph_children);
//System.out.println("First level: "+ graph_parents.size());
}
private ArrayList<Integer> getParentOf(Integer child) {
return (ArrayList<Integer>)child_parent.get(child);
}
public static void main(String[] args){
Graph graph = new Graph();
graph.loadGraphFromFile(args[0]);
graph.printGraphStats();
//graph.findFirstLevelOfHierarchy();
}
}
| 4,884 | 31.350993 | 96 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/CalculatedMeasures.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
public class CalculatedMeasures {
double precision;
double recall;
double fmeasure;
double average_precision;
double accuracy; // for phase B - exact answers for yesno questions
double strict_accuracy;
double lenient_accuracy;
double mean_reciprocal_rank;
public CalculatedMeasures() {
precision = 0;
recall=0;
fmeasure=0;
average_precision=0;
accuracy=0;
strict_accuracy = 0;
lenient_accuracy = 0;
mean_reciprocal_rank = 0;
}
public double getAccuracy() {
return accuracy;
}
public void setAccuracy(double accuracy) {
this.accuracy = accuracy;
}
public double getLenient_accuracy() {
return lenient_accuracy;
}
public void setLenient_accuracy(double lenient_accuracy) {
this.lenient_accuracy = lenient_accuracy;
}
public double getMean_reciprocal_rank() {
return mean_reciprocal_rank;
}
public void setMean_reciprocal_rank(double mean_reciprocal_rank) {
this.mean_reciprocal_rank = mean_reciprocal_rank;
}
public double getStrict_accuracy() {
return strict_accuracy;
}
public void setStrict_accuracy(double strict_accuracy) {
this.strict_accuracy = strict_accuracy;
}
public double getAverage_precision() {
return average_precision;
}
public void setAverage_precision(double average_precision) {
this.average_precision = average_precision;
}
public double getFmeasure() {
return fmeasure;
}
public void setFmeasure(double fmeasure) {
this.fmeasure = fmeasure;
}
public double getPrecision() {
return precision;
}
public void setPrecision(double precision) {
this.precision = precision;
}
public double getRecall() {
return recall;
}
public void setRecall(double recall) {
this.recall = recall;
}
}
| 2,843 | 23.730435 | 75 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/ExactAnswer.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
import java.util.ArrayList;
public class ExactAnswer {
String answer; // case of yesno questions
ArrayList<String> answers; // case of factoids and lists
ArrayList<ArrayList<String>> lists;
public ExactAnswer()
{
answers = new ArrayList<String>();
lists = new ArrayList<ArrayList<String>>();
}
public String getAnswer() {
return answer;
}
public void setAnswer(String answer) {
this.answer = answer;
}
public ArrayList<String> getAnswers() {
return answers;
}
public void setAnswers(ArrayList<String> answers) {
this.answers = answers;
}
public ArrayList<ArrayList<String>> getLists() {
return lists;
}
public void setLists(ArrayList<ArrayList<String>> lists) {
this.lists = lists;
}
boolean containsAnswer(String resp)
{
for(int i=0;i<answers.size();i++)
if(answers.get(i).equals(resp))
return true;
return false;
}
public boolean containsAnswerSynonym(ArrayList<String> resp,boolean remove)
{
for(int i=0;i<lists.size();i++)
{
ArrayList<String> listofans = lists.get(i);
for(int k=0;k<listofans.size();k++)
{
String ans1 = listofans.get(k);
for(int l=0;l<resp.size();l++)
{
if(ans1.equals(resp.get(l)))
{
// System.out.println(ans1+" "+resp.get(l));
if(remove) // will use this only for list questions
lists.remove(i);
return true;
}
}
}
}
return false;
}
}
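// A minimal usage sketch, not part of the original BioASQ evaluation source; the class name
// and the sample answers are hypothetical. It shows how containsAnswerSynonym() matches a
// response against any synonym list and, when asked, consumes the matched list.
class ExactAnswerUsageSketch {
    public static void main(String[] args) {
        ExactAnswer golden = new ExactAnswer();
        ArrayList<String> synonyms = new ArrayList<String>();
        synonyms.add("acetylsalicylic acid");
        synonyms.add("aspirin");
        golden.getLists().add(synonyms);
        ArrayList<String> response = new ArrayList<String>();
        response.add("aspirin");
        System.out.println(golden.containsAnswerSynonym(response, true)); // true; the matched list is removed
        System.out.println(golden.containsAnswerSynonym(response, true)); // false; no synonym lists remain
    }
}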
| 2,644 | 26.268041 | 81 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/PubMedDocument.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
public class PubMedDocument {
String text;
String title;
String pmid;
String journal;
String[] meshMajor;
public PubMedDocument(String text, String title, String pmid, String journal, String[] meshMajor) {
this.text = text;
this.title = title;
this.pmid = pmid;
this.journal = journal;
this.meshMajor = meshMajor;
}
public String getJournal() {
return journal;
}
public String[] getMeshMajor() {
return meshMajor;
}
public String getPmid() {
return pmid;
}
public String getText() {
return text;
}
public String getTitle() {
return title;
}
}
| 1,538 | 23.428571 | 103 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/Question.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
import java.util.ArrayList;
public class Question {
String body;
ArrayList<String> concepts;
ArrayList<String> documents;
ExactAnswer exact_answer;
String id;
String ideal_answer;
ArrayList<Snippet> snippets;
int type;
ArrayList<Triple> triples;
public static final int
FACTOID=1, YESNO=2,SUMMARY=3,LIST=4;
public Question(String id, int type) {
this.id = id;
this.type = type;
concepts = new ArrayList<String>();
documents = new ArrayList<String>();
snippets = new ArrayList<Snippet>();
triples = new ArrayList<Triple>();
}
public Question() {
concepts = new ArrayList<String>();
documents = new ArrayList<String>();
snippets = new ArrayList<Snippet>();
triples = new ArrayList<Triple>();
}
public void setBody(String body) {
this.body = body;
}
public void setExact_answer(ExactAnswer exact_answer) {
this.exact_answer = exact_answer;
}
public ArrayList<String> getConcepts() {
return concepts;
}
public ArrayList<String> getDocuments() {
return documents;
}
public ExactAnswer getExact_answer() {
return exact_answer;
}
public String getId() {
return id;
}
public String getIdeal_answer() {
return ideal_answer;
}
public ArrayList<Snippet> getSnippets() {
return snippets;
}
public ArrayList<Triple> getTriples() {
return triples;
}
public int getType() {
return type;
}
public void setIdeal_answer(String ideal_answer) {
this.ideal_answer = ideal_answer;
}
public void addSnippet(Snippet sn)
{
snippets.add(sn);
}
public void addSnippets(ArrayList<Snippet> sn)
{
snippets.addAll(sn);
}
public void addTriple(Triple tr)
{
triples.add(tr);
}
public void addTriples(ArrayList<Triple> tr)
{
triples.addAll(tr);
}
public void addDocument(String doc)
{
documents.add(doc);
}
public void addDocuments(ArrayList<String> docs)
{
documents.addAll(docs);
}
public void addConcept(String con)
{
concepts.add(con);
}
public void addConcepts(ArrayList<String> conc)
{
concepts.addAll(conc);
}
public void setId(String id) {
this.id = id;
}
public void setType(int type) {
this.type = type;
}
public int numOfConcepts()
{
return this.concepts.size();
}
public int numOfDocs()
{
return this.documents.size();
}
public int numOfSnippets()
{
return this.snippets.size();
}
    public boolean hasQuestionConcepts()
    {
        return concepts.size()>0;
    }
}
| 3,773 | 21.070175 | 75 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/Snippet.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
public class Snippet {
String document;
String text;
String fieldNameBegin;
String fieldNameEnd;
int begin_index;
int end_index;
public Snippet(String document, String text, String fieldNameBegin, String fieldNameEnd, int begin_index, int end_index) {
this.document = document;
this.text = text;
this.fieldNameBegin = fieldNameBegin;
this.fieldNameEnd = fieldNameEnd;
this.begin_index = begin_index;
this.end_index = end_index;
}
public void setBegin_index(int begin_index) {
this.begin_index = begin_index;
}
public void setDocument(String document) {
this.document = document;
}
public void setEnd_index(int end_index) {
this.end_index = end_index;
}
public void setFieldNameBegin(String fieldNameBegin) {
this.fieldNameBegin = fieldNameBegin;
}
public void setFieldNameEnd(String fieldNameEnd) {
this.fieldNameEnd = fieldNameEnd;
}
public void setText(String text) {
this.text = text;
}
public int getBegin_index() {
return begin_index;
}
public String getDocument() {
return document;
}
public String getDocumentOnlyID() {
String parts[] = document.split("/");
//System.out.println(parts[parts.length-1]);
return parts[parts.length-1];
}
public int getEnd_index() {
return end_index;
}
public String getFieldNameBegin() {
return fieldNameBegin;
}
public String getFieldNameEnd() {
return fieldNameEnd;
}
public String getText() {
return text;
}
public int getSize()
{
return end_index-begin_index+1;
}
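    /**
     * Returns the number of overlapping positions between this snippet and sn when both
     * refer to the same sections (or the special "0"/"abstract" case handled below), and 0 otherwise.
     */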
public double overlap(Snippet sn)
{
if((fieldNameBegin.equals(sn.getFieldNameBegin()) && fieldNameEnd.equals(sn.getFieldNameEnd()))||
(fieldNameBegin.equals("0") && sn.getFieldNameBegin().equals("abstract") ))
{
if(begin_index>sn.getEnd_index() || end_index<sn.getBegin_index())
return 0;
else
{
if(begin_index>=sn.getBegin_index() && end_index<=sn.getEnd_index())
return end_index-begin_index+1;
if(begin_index>=sn.getBegin_index() && end_index>sn.getEnd_index())
return sn.getEnd_index() - begin_index +1;
if(begin_index<sn.getBegin_index() && end_index<=sn.getEnd_index())
return end_index - sn.getBegin_index() +1;
if(begin_index<sn.getBegin_index() && end_index>sn.getEnd_index())
return sn.getEnd_index()-sn.getBegin_index()+1;
}
}
return 0;
}
public boolean itOverlaps(Snippet sn)
{
if((fieldNameBegin.equals(sn.getFieldNameBegin()) && fieldNameEnd.equals(sn.getFieldNameEnd()))||
(fieldNameBegin.equals("0") && sn.getFieldNameBegin().equals("abstract") ))
{
if(begin_index>sn.getEnd_index() || end_index<sn.getBegin_index())
return false;
else
{
if(begin_index>=sn.getBegin_index() && end_index<=sn.getEnd_index())
return true;
if(begin_index>=sn.getBegin_index() && end_index>sn.getEnd_index())
return true;
if(begin_index<sn.getBegin_index() && end_index<=sn.getEnd_index())
return true;
if(begin_index<sn.getBegin_index() && end_index>sn.getEnd_index())
return true;
}
}
return false;
}
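    /**
     * Returns a new snippet spanning the union of this snippet and sn: the smaller begin
     * index and the larger end index, keeping this snippet's document, text and section names.
     */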
public Snippet merge(Snippet sn)
{
Snippet newsn = new Snippet(document, text, fieldNameBegin, fieldNameEnd, -1, -1);
if(begin_index<=sn.begin_index)
newsn.setBegin_index(begin_index);
else
newsn.setBegin_index(sn.begin_index);
if(end_index>=sn.end_index)
newsn.setEnd_index(end_index);
else
newsn.setEnd_index(sn.end_index);
return newsn;
}
}
| 5,046 | 29.221557 | 126 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/Task1bData.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
public class Task1bData {
ArrayList<Question> questions;
int VERSION_OF_CHALLENGE;
boolean isGold;
    /**
     * Data loader for gold files and submissions (a usage sketch follows the constructor below).
     *
     * @param version VERSION_OF_CHALLENGE: use version 2 for BioASQ1&2, version 3 for BioASQ3&4, version 5 since BioASQ5, version 8 since BioASQ8
     * @param isGold whether the data to read are gold data or not (since BioASQ5 gold and submitted data have different formats, i.e. synonyms appear only in gold data)
     */
public Task1bData(int version, boolean isGold)
{
questions = new ArrayList<Question>();
VERSION_OF_CHALLENGE = version;
this.isGold = isGold;
}
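    /**
     * Illustrative usage sketch (not part of the original BioASQ code); the file names
     * below are placeholders. A gold loader keeps all synonyms, while a submission
     * loader keeps only the first synonym of each answer (since BioASQ5).
     */
    private static void exampleUsage() throws IOException {
        Task1bData golden = new Task1bData(evaluation.EvaluatorTask1b.BIOASQ8, true);
        Task1bData submitted = new Task1bData(evaluation.EvaluatorTask1b.BIOASQ8, false);
        golden.readData("golden.json");         // placeholder path
        submitted.readData("submission.json");  // placeholder path
        golden.dataProperties();                // prints average documents/concepts/snippets per question
    }

    /**
     * Parses a BioASQ Task B JSON file and fills the list of questions, including their
     * documents, concepts, snippets, triples and exact answers; the exact-answer parsing
     * depends on VERSION_OF_CHALLENGE and on whether this loader holds gold data.
     */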
public void readData(String jsonFile) throws IOException {
int num_questions=0;
int num_triples=0;
int type_yesno=0;
int type_factoid=0;
int type_list=0;
int type_summary=0;
try {
JsonReader reader = new JsonReader(new InputStreamReader(new FileInputStream(jsonFile)));
//reader.setLenient(true);
// JsonToken peeknext = reader.peek();
// peeknext.
reader.beginObject();
while(reader.hasNext())
{
String nextfield = reader.nextName();
if(nextfield.equals("questions"))
{
reader.beginArray();
while (reader.hasNext()) {
reader.beginObject();
num_questions++;
Question qst = new Question();
while(reader.hasNext())
{
String name = reader.nextName();
int k=0;
if(name.equals("body"))
{
String body = reader.nextString();
qst.setBody(body);
}
else if(name.equals("triples"))
{
num_triples++;
ArrayList<Triple> triples = readTriplesArray(reader);
qst.addTriples(triples);
}
else if(name.equals("type"))
{
String type = reader.nextString();
if(type.equals("yesno"))
{
qst.setType(Question.YESNO);
type_yesno++;
}
else if(type.equals("factoid"))
{
qst.setType(Question.FACTOID);
type_factoid++;
}
if(type.equals("summary"))
{
qst.setType(Question.SUMMARY);
type_summary++;
}
if(type.equals("list"))
{
qst.setType(Question.LIST);
type_list++;
}
}
else if(name.equals("id"))
{
String id = reader.nextString();
qst.setId(id);
}
else if(name.equals("concepts"))
{
ArrayList<String> concepts = readConcepts(reader);
qst.addConcepts(concepts);
}
else if(name.equals("documents"))
{
ArrayList<String> docs = readDocuments(reader);
qst.addDocuments(docs);
}
else if(name.equals("exact_answer"))
{
ExactAnswer ea = new ExactAnswer();
JsonToken peek = reader.peek();
if(peek == JsonToken.BEGIN_ARRAY) //list or factoid
{
reader.beginArray();
JsonToken peek1 = reader.peek();
ArrayList<String> listOfAnswers=new ArrayList<String>();
ArrayList<ArrayList<String>> listofarrays = new ArrayList<ArrayList<String>>();
if(peek1==JsonToken.BEGIN_ARRAY) // list (or factoid-list since BioASQ3)
{
/*
* Warning: changed the following for BioASQ 5
* No synonyms in submissions anymore, only in gold files
*/
if(VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ2 || VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ3){
listofarrays = readExactAnswerListOfArraysv2(reader);
ea.setLists(listofarrays);
} else if(VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ5 || VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ8){
if(!this.isGold){ // For submissions use restricted parsing : only first of synonyms taken into account
listofarrays = readExactAnswerListOfArraysv3(reader);
} else { // For golden read all synonyms normally
listofarrays = readExactAnswerListOfArraysv2(reader);
}
ea.setLists(listofarrays);
} else
{
System.out.println("Wrong challenge version. I will exit.");
System.exit(0);
}
}
else if(peek1 == JsonToken.STRING) // factoid (for BioASQ1&2)
{
/*
* Warning: changed the following for BioASQ 3
* we now have list of arrays for factoid
*/
if(VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ2){
listOfAnswers = readExactAnswerArray(reader);
ea.setAnswers(listOfAnswers);
}
//not reached!
else if(VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ3){
listofarrays = readExactAnswerListOfArraysv2(reader);
ea.setLists(listofarrays);
}
/*
* Warning: changed the following for BioASQ 5
* No synonyms are submitted anymore by participants
*/
//not reached!
else if(VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ5 || VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ8){
listofarrays = readExactAnswerListOfArraysv3(reader);
ea.setLists(listofarrays);
}
else
{
System.out.println("Wrong challenge version. I will exit.");
System.exit(0);
}
}
//ea.setAnswers(listOfAnswers);
qst.setExact_answer(ea);
reader.endArray();
}
else if(peek == JsonToken.STRING) //yesno
{
String yesno_answer = reader.nextString();
yesno_answer = yesno_answer.toLowerCase();
if(yesno_answer.contains("yes"))
ea.setAnswer("yes");
else if(yesno_answer.contains("no"))
ea.setAnswer("no");
else
{
ea.setAnswer("none");
// System.out.println("Unknown answer in yesno question: "+yesno_answer);
}
qst.setExact_answer(ea);
}
}
// Edited for BioASQ4 Evaluation (to solve format conflict with Rouge.py)
                            // ideal answers are not evaluated with this code, so there is no need to read them (Rouge and manual curation are used instead)
// else if(name.equals("ideal_answer"))
// {
// String ideal="";
// try{ideal = reader.nextString();}catch(IllegalStateException ex){System.out.println(ex.toString());System.out.println(jsonFile);
// }
// qst.setIdeal_answer(ideal);
// }
else if(name.equals("snippets"))
{
ArrayList<Snippet> snippets = readSnippets(reader);
qst.addSnippets(snippets);
}
else
{
reader.skipValue();
}
}
//reader.skipValue();
reader.endObject();
this.questions.add(qst);
}
reader.endArray();
}
else
{
reader.skipValue();
}
}
reader.endObject();
/* System.out.println("Number of questions:"+num_questions);
System.out.println("Number of triples:"+num_triples);
System.out.println("Number of yesno:"+type_yesno);
System.out.println("Number of factoid:"+type_factoid);
System.out.println("Number of list:"+type_list);
System.out.println("Number of summary:"+type_summary);*/
} catch (FileNotFoundException ex) {
System.out.println("Problem in JSONfile : "+jsonFile);
}
}
private ArrayList<String> readExactAnswerArray(JsonReader reader)
{
ArrayList<String> answers = new ArrayList<String>();
int count = 0;
try {
while (reader.hasNext()) {
String nextString = reader.nextString();
answers.add(nextString.toLowerCase());
}
} catch (IOException ex) {
}
return answers;
}
private ArrayList<String> readExactAnswerListOfArrays(JsonReader reader)
{
ArrayList<String> answers = new ArrayList<String>();
int count = 0;
try {
while(reader.hasNext()){
reader.beginArray();
while (reader.hasNext()) {
ArrayList<String> temp_ans = readExactAnswerArray(reader);
answers.addAll(temp_ans);
}
reader.endArray();
}
} catch (IOException ex) {
}
return answers;
}
    /** Reads exact answers submitted by systems for list [1] and factoid [2] questions.
     * Also reads gold exact answers for list and factoid questions (where synonyms are included in BioASQ5 too).
     * [1] Used for list questions, up to BioASQ4, when synonyms were submitted by participants.
     * [2] Used for factoid questions, of BioASQ3&4, when synonyms were submitted by participants.
     */
private ArrayList<ArrayList<String>> readExactAnswerListOfArraysv2(JsonReader reader)
{
ArrayList<ArrayList<String>> answers = new ArrayList<ArrayList<String>>();
int count = 0;
try {
while(reader.hasNext()){
reader.beginArray();
while (reader.hasNext()) {
ArrayList<String> temp_ans = readExactAnswerArray(reader);
answers.add(temp_ans);
}
reader.endArray();
}
} catch (IOException ex) {
}
// System.out.println(answers);
return answers;
}
    /** Reads exact answers submitted by systems for list questions and factoid questions.
     * Used since BioASQ 5, when synonyms were no longer submitted by participants.
     * Only the first element of the inner list is taken into account for evaluation.
     * Note: not used for golden exact answers, where synonyms are included.
     */
private ArrayList<ArrayList<String>> readExactAnswerListOfArraysv3(JsonReader reader)
{
ArrayList<ArrayList<String>> answers = new ArrayList<ArrayList<String>>();
int count = 0;
try {
while(reader.hasNext()){
reader.beginArray();
while (reader.hasNext()) {
ArrayList<String> temp_ans = readExactAnswerArray(reader); // Full answer submitted (with possible synonyms)
                    ArrayList<String> temp_ans_first_item = new ArrayList<String>(); // edited answer (only first synonym kept)
                    if(!temp_ans.isEmpty()){
                        temp_ans_first_item.add(temp_ans.get(0));
                    }
                    answers.add(temp_ans_first_item);
}
reader.endArray();
}
} catch (IOException ex) {
}
// System.out.println(answers);
return answers;
}
private ArrayList<Triple> readTriplesArray(JsonReader reader){
ArrayList<Triple> triples = new ArrayList<Triple>();
try {
reader.beginArray();
while (reader.hasNext()) {
reader.beginObject();
String op="",pre="",sub="";
while(reader.hasNext())
{
String name = reader.nextName();
if(name.equals("o"))
{
JsonToken peek = reader.peek();
if(peek.equals(JsonToken.NULL)){
op = "";reader.nextNull();}
else
op = reader.nextString();
}
else if(name.equals("p"))
{
pre = reader.nextString();
}
else if(name.equals("s"))
{
JsonToken peek = reader.peek();
if(peek.equals(JsonToken.NULL)){
sub="";reader.nextNull();
}
else
sub = reader.nextString();
}
else
reader.skipValue();
}
Triple tr = new Triple(pre, sub, op);
reader.endObject();
triples.add(tr);
}
reader.endArray();
} catch (IOException ex) {
}
return triples;
}
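    /**
     * Parses the "snippets" array of a question; begin/end section names are kept only
     * after their first '.', so a value such as "sections.0" would be read as "0".
     */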
private ArrayList<Snippet> readSnippets(JsonReader reader) {
ArrayList<Snippet> snippets = new ArrayList<Snippet>();
try {
reader.beginArray();
while (reader.hasNext()) {
reader.beginObject();
String document="",fnameBegin="",fnameEnd="",text="";
int beginIndex=0;
int endIndex=0;
while(reader.hasNext())
{
String name = reader.nextName();
if(name.equals("offsetInBeginSection"))
{
beginIndex = reader.nextInt();
}
else if(name.equals("offsetInEndSection"))
{
endIndex = reader.nextInt();
}
else if(name.equals("document"))
{
document = reader.nextString();
}
else if(name.equals("beginSection"))
{
fnameBegin = reader.nextString();
fnameBegin = fnameBegin.substring(fnameBegin.indexOf('.')+1);
}
else if(name.equals("endSection"))
{
fnameEnd = reader.nextString();
fnameEnd = fnameEnd.substring(fnameEnd.indexOf('.')+1);
}
else if(name.equals("text"))
{
text = reader.nextString();
}
else
{
//System.out.println("Unknown field "+name +" in snippet");
}
}
Snippet sn = new Snippet(document, text, fnameBegin, fnameEnd, beginIndex, endIndex);
reader.endObject();
snippets.add(sn);
}
reader.endArray();
} catch (IOException ex) {
}
return snippets;
}
public ArrayList<String> readConcepts(JsonReader reader){
ArrayList<String> conc = new ArrayList<String>();
int count=0;
try{
reader.beginArray();
while (reader.hasNext()) {
String nextString = reader.nextString();
if(!conc.contains(nextString))
conc.add(nextString);
}
reader.endArray();
}catch(IOException ex){}
return conc;
}
public ArrayList<String> readDocuments(JsonReader reader){
ArrayList<String> docs = new ArrayList<String>();
int count=0;
try{
reader.beginArray();
while (reader.hasNext()) {
String nextString = reader.nextString();
if(!docs.contains(nextString))
docs.add(nextString);
}
reader.endArray();
}catch(IOException ex){}
return docs;
}
public Question getQuestion(String id)
{
for(int i=0;i<questions.size();i++)
{
Question qst = questions.get(i);
if(qst.getId().equals(id))
return qst;
}
return null;
}
public Question getQuestion(int index)
{
return questions.get(index);
}
public int numQuestions()
{
return questions.size();
}
public void dataProperties()
{
int docs=0,conc=0,snip=0;
for(int i=0;i<questions.size();i++)
{
docs += questions.get(i).numOfDocs();
conc += questions.get(i).numOfConcepts();
snip += questions.get(i).numOfSnippets();
}
System.out.println("Avrg docs: "+ (double)docs/(double)questions.size());
System.out.println("Avrg concepts: "+ (double)conc/(double)questions.size());
System.out.println("Avrg snippets: "+ (double)snip/(double)questions.size());
}
public static void main(String args[])
{
Task1bData data = new Task1bData(2, false);
try {
data.readData(args[0]);
data.dataProperties();
} catch (IOException ex) {
Logger.getLogger(Task1bData.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
| 22,193 | 37.397924 | 168 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/TaskADataParser.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
import com.google.gson.stream.JsonReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.logging.Level;
import java.util.logging.Logger;
public class TaskADataParser {
HashSet journalList;
    int numOfArticles=0;
double labelsPerArticle=0.0;
HashSet labelsList;
double labelDensity=0;
HashSet pmids;
    /**
     * Returns a JsonReader positioned inside the top-level array of the given file:
     * it opens the outer object, consumes its first field name and begins that array,
     * so that callers can iterate with getNextDocument (see the example read loop below).
     */
public static JsonReader streamParser(String jsonFile) throws IOException {
int count = 0;
int abstract_count=0;
int duplicates = 0;
JsonReader reader =null;
try {
reader = new JsonReader(new InputStreamReader(new FileInputStream(jsonFile)));
reader.setLenient(true);
reader.beginObject();
String nam = reader.nextName();
System.out.println(nam);
reader.beginArray();
} catch (Exception ex) {
System.out.println("File not found");
System.out.println(ex.toString());
}
return reader;
}
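    /**
     * Illustrative usage sketch (not part of the original BioASQ code): the typical read
     * loop over a Task A corpus file, mirroring how data.PreProcess.ExtractVocabulary
     * consumes this parser. The file name is a placeholder.
     */
    private static void exampleReadLoop() throws IOException {
        JsonReader reader = streamParser("allMeSH.json");   // placeholder path
        while (reader.hasNext()) {
            PubMedDocument doc = getNextDocument(reader);
            System.out.println(doc.getPmid() + ": " + doc.getTitle());
        }
        closeReader(reader);
    }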
public static void closeReader(JsonReader reader)
{
try {
reader.endArray();
reader.endObject();
} catch (IOException ex) {
Logger.getLogger(TaskADataParser.class.getName()).log(Level.SEVERE, null, ex);
}
}
public static PubMedDocument getNextDocument(JsonReader reader)
{
String text=null;
String title=null;
String pmid=null;
String journal=null;
String[] meshMajor=null;
try {
if (reader.hasNext()) {
reader.beginObject();
while (reader.hasNext()) {
String name = reader.nextName();
if (name.equals("abstractText")) {
text = reader.nextString();
} else if (name.equals("journal")) {
journal = reader.nextString();
} else if (name.equals("meshMajor")) {
meshMajor = readLabelsArray(reader);
} else if (name.equals("pmid")) {
pmid = reader.nextString();
} else if (name.equals("title")){
title = reader.nextString();
}
else if (name.equals("year")){
reader.skipValue();
}
else{
System.out.println(name);
reader.skipValue();
}
}
reader.endObject();
}
} catch (Exception ex) { }
return new PubMedDocument(text, title, pmid, journal, meshMajor);
}
public static String[] readLabelsArray(JsonReader reader){
String labels[];
ArrayList<String> lab = new ArrayList<String>();
try{
reader.beginArray();
while (reader.hasNext()) {
String nextString = reader.nextString();
lab.add(nextString);
}
reader.endArray();
}catch(IOException ex){}
labels = new String[lab.size()];
labels = lab.toArray(labels);
return labels;
}
}
| 4,387 | 29.685315 | 90 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/Triple.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data;
public class Triple {
String predicate;
String subject;
String operator;
public Triple(String predicate, String subject, String operator) {
this.predicate = predicate;
this.subject = subject;
this.operator = operator;
}
public String getOperator() {
return operator;
}
public void setOperator(String operator) {
this.operator = operator;
}
public String getPredicate() {
return predicate;
}
public void setPredicate(String predicate) {
this.predicate = predicate;
}
public String getSubject() {
return subject;
}
public void setSubject(String subject) {
this.subject = subject;
}
@Override
public int hashCode() {
int hash = 7;
hash = 47 * hash + (this.predicate != null ? this.predicate.hashCode() : 0);
hash = 47 * hash + (this.subject != null ? this.subject.hashCode() : 0);
hash = 47 * hash + (this.operator != null ? this.operator.hashCode() : 0);
return hash;
}
@Override public boolean equals(Object e)
{
if ( this == e ) return true;
if ( !(e instanceof Triple) ) return false;
Triple tr = (Triple)e;
return this.predicate.equals(tr.predicate) && this.operator.equals(tr.operator) &&
this.subject.equals(tr.subject);
}
}
| 2,247 | 26.753086 | 90 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/data/PreProcess/ExtractVocabulary.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package data.PreProcess;
import com.google.gson.stream.JsonReader;
import data.PubMedDocument;
import data.TaskADataParser;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.tartarus.snowball.SnowballStemmer;
/**
*
* @author alvertos
*/
public class ExtractVocabulary {
HashSet vocabulary;
TreeMap uniqueWords;
JsonReader reader;
public ExtractVocabulary()
{
}
public ExtractVocabulary(String datafile) {
vocabulary = new HashSet();
uniqueWords = new TreeMap();
try {
reader = TaskADataParser.streamParser(datafile);
} catch (IOException ex) {
}
}
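    /**
     * Builds the vocabulary of the corpus opened in the constructor: each abstract is
     * split on non-word characters, lower-cased and Snowball-stemmed; the resulting word
     * set is written to vocfile and the per-word frequencies to uniquefile.
     */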
public void makeVoc(String vocfile,String uniquefile) throws InstantiationException, IllegalAccessException
{
int numofdocs=0;
Class stemClass;
SnowballStemmer stemmer=null;
try {
stemClass = Class.forName("org.tartarus.snowball.ext.englishStemmer");
stemmer = (SnowballStemmer)stemClass.newInstance();
} catch (ClassNotFoundException ex) {
Logger.getLogger(ExtractVocabulary.class.getName()).log(Level.SEVERE, null, ex);
}
try {
while(reader.hasNext()){
PubMedDocument nextDocument = TaskADataParser.getNextDocument(reader);
numofdocs++;
if(numofdocs%100000==0){
System.out.println(numofdocs + " of documents have been processed");
System.out.println("Size of vocabulary: "+vocabulary.size());
}
String absrt = nextDocument.getText();
String words[] = absrt.split("[\\W]+");
//String wordsInLowerCase[] = new String[words.length];
String wordInLowerCase;
for (int k = 0; k < words.length; k++)
{
wordInLowerCase = words[k].toLowerCase();
stemmer.setCurrent(wordInLowerCase);
if (stemmer.stem()) {
wordInLowerCase = stemmer.getCurrent();
}
if(uniqueWords.containsKey(wordInLowerCase))
{Integer freq = (Integer)uniqueWords.get(wordInLowerCase);
uniqueWords.put(wordInLowerCase,freq.intValue()+1);}
else{uniqueWords.put(wordInLowerCase,1);}
vocabulary.add(wordInLowerCase);
}
}
writeVocabularyToFile(vocfile);
writeUniqueWordsToFile(uniquefile);
} catch (Exception ex) {
writeVocabularyToFile(vocfile);
writeUniqueWordsToFile(uniquefile);
}
TaskADataParser.closeReader(reader);
}
    /**
     * Vectorizes each document of the stream opened in the constructor against a fixed
     * vocabulary and writes one line per document: its label ids (comma separated)
     * followed by sparse "termId:frequency" pairs, e.g. "3,17 5:2.0 42:1.0".
     *
     * @param vocfile      the vocabulary used to vectorize the documents; for each document a term frequency is calculated
     * @param namepmidf    mapping file with "name=id" lines, mapping each meshMajor label to an identifier (read by loadNamePMIDMapping)
     * @param pmidintegerf mapping file with "id integer" lines, mapping those identifiers to integer label ids (read by loadPMIDIntegerMapping)
     * @param outfile      the output file, one vectorized document per line
     * @throws InstantiationException
     * @throws IllegalAccessException
     */
public void vectorizeDocuments(String vocfile,String namepmidf,String pmidintegerf,String outfile) throws InstantiationException, IllegalAccessException
{
int numofdocs=0;
Class stemClass;
SnowballStemmer stemmer=null;
try {
stemClass = Class.forName("org.tartarus.snowball.ext.englishStemmer");
stemmer = (SnowballStemmer)stemClass.newInstance();
} catch (ClassNotFoundException ex) {
Logger.getLogger(ExtractVocabulary.class.getName()).log(Level.SEVERE, null, ex);
}
TreeMap vocab = loadVocabularyMap(vocfile);
TreeMap namepmid = loadNamePMIDMapping(namepmidf);
TreeMap pmidinteger = loadPMIDIntegerMapping(pmidintegerf);
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(outfile));
while(reader.hasNext()){
TreeMap doc = new TreeMap();
PubMedDocument nextDocument = TaskADataParser.getNextDocument(reader);
numofdocs++;
if(numofdocs%10000==0){
System.out.println(numofdocs + " of documents have been processed");
}
String absrt = nextDocument.getText();
String words[] = absrt.split("[\\W]+");
//String wordsInLowerCase[] = new String[words.length];
String wordInLowerCase;
String[] meshMajor = nextDocument.getMeshMajor();
for (int k = 0; k < words.length; k++)
{
wordInLowerCase = words[k].toLowerCase();
stemmer.setCurrent(wordInLowerCase);
if (stemmer.stem()) {
wordInLowerCase = stemmer.getCurrent();
}
if(vocab.containsKey(wordInLowerCase))
{
if(doc.containsKey(wordInLowerCase))
{
Integer freq = (Integer)doc.get(wordInLowerCase);
doc.put(wordInLowerCase, freq.intValue()+1);
}else{doc.put(wordInLowerCase, 1);}
}
}
// System.out.println("Size of vectorized doc:"+doc.size());
String vector = vectorToString(doc, vocab);
for(int i=0;i<meshMajor.length;i++)
{
String pmid = (String)namepmid.get(meshMajor[i]);
Integer intid = (Integer)pmidinteger.get(pmid);
if(i==0)
bw.write(intid.toString());
else
bw.write(","+intid.toString());
}
bw.write(vector+"\n");
}
bw.close();
} catch (Exception ex) {
try {
bw.close();
} catch (IOException ex1) {
Logger.getLogger(ExtractVocabulary.class.getName()).log(Level.SEVERE, null, ex1);
}
}
TaskADataParser.closeReader(reader);
}
String vectorToString(TreeMap document,TreeMap vocab)
{
String vec = "";
Iterator iter = document.keySet().iterator();
while(iter.hasNext())
{
String word = (String)iter.next();
Integer id = (Integer)vocab.get(word);
Integer freq = (Integer)document.get(word);
vec+=" "+id.intValue()+":"+freq.doubleValue();
}
return vec;
}
public TreeMap loadPMIDIntegerMapping(String mapfile)
{
BufferedReader br = null;
TreeMap mapping = new TreeMap();
try {
br = new BufferedReader(new FileReader(mapfile));
String line;
while((line=br.readLine())!=null){
String nodes[] = line.split("\\s+");
mapping.put(nodes[0],Integer.parseInt(nodes[1]));
}
br.close();
}catch(IOException ex){
}
return mapping;
}
public TreeMap loadNamePMIDMapping(String mapfile)
{
BufferedReader br = null;
TreeMap mapping = new TreeMap();
try {
br = new BufferedReader(new FileReader(mapfile));
String line;
while((line=br.readLine())!=null){
String nodes[] = line.split("=");
mapping.put(nodes[0],nodes[1]);
}
br.close();
}catch(IOException ex){
}
return mapping;
}
private void writeVocabularyToFile(String vocfile) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(vocfile));
Iterator iter = this.vocabulary.iterator();
while(iter.hasNext())
{
String word = (String)iter.next();
bw.write(word+"\n");
}
bw.close();
} catch (IOException ex) {
Logger.getLogger(ExtractVocabulary.class.getName()).log(Level.SEVERE, null, ex);
}
}
private void writeUniqueWordsToFile(String uniqueWordsfile) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(uniqueWordsfile));
Iterator iter = this.uniqueWords.keySet().iterator();
while(iter.hasNext())
{
String word = (String)iter.next();
Integer freq = (Integer)uniqueWords.get(word);
bw.write(word+" "+freq.intValue()+"\n");
}
bw.close();
} catch (IOException ex) {
Logger.getLogger(ExtractVocabulary.class.getName()).log(Level.SEVERE, null, ex);
}
}
private void removeStopWords(String filestopwords,String vocabularyfile,String outfile){
HashSet stopwords = loadStopWords(filestopwords);
vocabulary = loadVocabulary(vocabularyfile);
HashSet voc_new = new HashSet();
System.out.println("Size of vocabular: "+vocabulary.size());
Iterator iter = vocabulary.iterator();
while(iter.hasNext())
{
String word = (String)iter.next();
if(!stopwords.contains(word))
voc_new.add(word);
}
System.out.println("Size of vocabular after stopword removal: "+voc_new.size());
writeVocabularyToFile(voc_new,outfile);
}
private void removeLowFrequencyWords(String filesfreqs,String vocabularyfile,String outfile,int min_freq){
TreeMap freqsfile = loadDocTermFrequencies(filesfreqs);
vocabulary = loadVocabulary(vocabularyfile);
HashSet voc_new = new HashSet();
System.out.println("Size of vocabular: "+vocabulary.size());
Iterator iter = vocabulary.iterator();
while(iter.hasNext())
{
String word = (String)iter.next();
Integer freq = (Integer)freqsfile.get(word);
if(freq.intValue()>min_freq)
voc_new.add(word);
}
System.out.println("Size of vocabular after low frequency words removal: "+voc_new.size());
writeVocabularyToFile(voc_new,outfile);
}
private HashSet loadStopWords(String filestopwords) {
HashSet list =new HashSet();
try {
BufferedReader br = null;
br = new BufferedReader(new FileReader(filestopwords));
String line=null;
while((line = br.readLine())!=null)
{
list.add(line);
}
} catch (IOException ex) {
}
return list;
}
private HashSet loadVocabulary(String filevoc) {
HashSet list =new HashSet();
try {
BufferedReader br = null;
br = new BufferedReader(new FileReader(filevoc));
String line=null;
while((line = br.readLine())!=null)
{
list.add(line);
}
} catch (IOException ex) {
}
return list;
}
private TreeMap loadVocabularyMap(String filevoc) {
TreeMap list =new TreeMap();
int counter=1;
try {
BufferedReader br = null;
br = new BufferedReader(new FileReader(filevoc));
String line=null;
while((line = br.readLine())!=null)
{
list.put(line,counter++);
}
} catch (IOException ex) {
}
return list;
}
private TreeMap loadDocTermFrequencies(String filefreq){
TreeMap list =new TreeMap();
try {
BufferedReader br = null;
br = new BufferedReader(new FileReader(filefreq));
String line=null;
while((line = br.readLine())!=null)
{
String temp[] = line.split("\\s+");
list.put(temp[0],Integer.parseInt(temp[1]));
}
} catch (IOException ex) {
}
return list;
}
public static void main(String args[])
{
if(args[0].equals("-makeVoc"))
{
ExtractVocabulary evoc = new ExtractVocabulary(args[1]);
try {
evoc.makeVoc(args[2],args[3]);
} catch (InstantiationException ex) {
} catch (IllegalAccessException ex) {
}
}
if(args[0].equals("-stopwords"))
{
ExtractVocabulary evoc = new ExtractVocabulary();
evoc.removeStopWords(args[1], args[2], args[3]);
}
if(args[0].equals("-lowfreq"))
{
ExtractVocabulary evoc = new ExtractVocabulary();
            evoc.removeLowFrequencyWords(args[1], args[2], args[3],Integer.parseInt(args[4]));
}
if(args[0].equals("-vectorize"))
{
ExtractVocabulary evoc = new ExtractVocabulary(args[1]);
try {
evoc.vectorizeDocuments(args[2],args[3],args[4],args[5]);
} catch (InstantiationException ex) {
} catch (IllegalAccessException ex) {
}
}
}
private void writeVocabularyToFile(HashSet voc_new, String outfile) {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(outfile));
Iterator iter = voc_new.iterator();
while(iter.hasNext())
{
String word = (String)iter.next();
bw.write(word+"\n");
}
bw.close();
} catch (IOException ex) {
Logger.getLogger(ExtractVocabulary.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
| 15,632 | 31.981013 | 156 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/evaluation/ConfusionMatrix.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package evaluation;
/**
 * This class implements a per-label (binary) confusion matrix, holding the counts of
 * true positives, true negatives, false positives and false negatives for one item
 * (for example one class in a multi-label document classification problem).
 */
public class ConfusionMatrix {
int tp; // count for true positives
int tn; // count for true negatives
int fp; // count for false positives
int fn; // count for false negatives
public ConfusionMatrix()
{
tp=0;fp=0;tn=0;fn=0;
}
public void increaseTP()
{
tp++;
}
public void increaseTN()
{
tn++;
}
public void increaseFP()
{
fp++;
}
public void increaseFN()
{
fn++;
}
public int getFn() {
return fn;
}
public int getFp() {
return fp;
}
public int getTn() {
return tn;
}
public int getTp() {
return tp;
}
}
| 1,723 | 20.822785 | 75 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/evaluation/Evaluator.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package evaluation;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
/** This script is called for BioASQ Task A evaluation, for flat measures.
 *
 * Example call
 * java -cp BioASQEvaluation2018.jar evaluation.Evaluator "...\golden_labels.txt" "...\submission_mapped.txt" -verbose
 * or java -cp BioASQEvaluation2018.jar evaluation.Evaluator "...\golden_labels.txt" "...\submission_mapped.txt"
 * *golden_labels.txt and submission_mapped.txt should have been mapped to integer format using converters.MapMeshResults
 * @author tasosnent
 */
public class Evaluator {
Map class_results;
ArrayList<String> truePredictions;
int size_of_true_labels;
int size_of_predicted_label;
private boolean verbosity = false;
public Evaluator()
{
class_results = new TreeMap<Integer,ConfusionMatrix>();
size_of_true_labels = 0;
}
public Evaluator(ArrayList<Integer> class_ids)
{
class_results = new TreeMap<Integer,ConfusionMatrix>();
for(int i=0;i<class_ids.size();i++)
{
class_results.put(new Integer(class_ids.get(i)),new ConfusionMatrix());
}
}
public void increaseTP(int class_id)
{
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(class_id);
try{
cm.increaseTP();
}catch(NullPointerException ex){
//System.out.println("Class id: "+class_id);
class_results.put(new Integer(class_id), new ConfusionMatrix());
cm = (ConfusionMatrix)class_results.get(class_id);
cm.increaseTP();
}
}
public void increaseTN(int class_id)
{
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(class_id);
cm.increaseTN();
}
public void increaseFP(int class_id)
{
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(class_id);
try{
cm.increaseFP();
}catch(NullPointerException ex){
class_results.put(new Integer(class_id), new ConfusionMatrix());
cm = (ConfusionMatrix)class_results.get(class_id);
cm.increaseFP();
}
}
public void increaseFN(int class_id)
{
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(class_id);
try{
cm.increaseFN();
}catch(NullPointerException ex){
class_results.put(new Integer(class_id), new ConfusionMatrix());
cm = (ConfusionMatrix)class_results.get(class_id);
cm.increaseFN();
}
}
    /**
     * Calculates the micro-precision measure for multi-label cases:
     * MiP = sum_c TP_c / sum_c (TP_c + FP_c), summing over all labels c.
     *
     * @return the micro-averaged precision
     */
public double microPrecision()
{
int a=0,b=0;
Iterator iterator = class_results.keySet().iterator();
while(iterator.hasNext())
{
Integer cl = (Integer)iterator.next();
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(cl);
a+= cm.getTp();
b+= cm.getTp()+cm.getFp();
}
return (double)a/(double)b;
}
public double microRecall()
{
int a=0,b=0;
Iterator iterator = class_results.keySet().iterator();
while(iterator.hasNext())
{
Integer cl = (Integer)iterator.next();
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(cl);
a+= cm.getTp();
b+= cm.getTp()+cm.getFn();
}
return (double)a/(double)b;
}
public double microFmeasure()
{
double a = microPrecision();
double b = microRecall();
return 2*a*b/(a+b);
}
public double macroPrecision()
{
int a=0,b=0;
Iterator iterator = class_results.keySet().iterator();
double sum=0.0;
while(iterator.hasNext())
{
Integer cl = (Integer)iterator.next();
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(cl);
if(cm.getTp()==0 && cm.getFp()==0)
continue;
sum+= (double)cm.getTp()/(double)(cm.getTp()+cm.getFp());
}
return sum/(double)this.size_of_predicted_label;
}
public double macroRecall()
{
double sum=0.0;
Iterator iterator = class_results.keySet().iterator();
while(iterator.hasNext())
{
Integer cl = (Integer)iterator.next();
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(cl);
if(cm.getTp()==0 && cm.getFn()==0)
continue;
sum+= (double)cm.getTp()/(double)(cm.getTp()+cm.getFn());
}
return sum/(double)this.size_of_true_labels;
}
public double macroFmeasure()
{
Iterator iterator = class_results.keySet().iterator();
double pre=0.0;
double rec=0.0;
double macroF=0.0;
while(iterator.hasNext())
{
Integer cl = (Integer)iterator.next();
ConfusionMatrix cm = (ConfusionMatrix)class_results.get(cl);
if(cm.getTp()!=0 || cm.getFp()!=0)
{
pre = (double)cm.getTp()/(double)(cm.getTp()+cm.getFp());
}
if(cm.getTp()!=0 || cm.getFn()!=0)
{
rec= (double)cm.getTp()/(double)(cm.getTp()+cm.getFn());
}
if(pre!=0 || rec!=0)
macroF += (2*pre*rec)/(pre+rec);
}
return macroF/this.size_of_true_labels;
}
/**
* This function loads from a file the true labels.
*
* @param trueLabels the full path to the file with the true labels
*/
public void loadTrueLabels(String trueLabels){
BufferedReader br2 = null;
truePredictions = new ArrayList<String>();
int row = 0;
try {
br2 = new BufferedReader(new FileReader(trueLabels));
String true_preds;
while((true_preds=br2.readLine())!=null){
row++;
truePredictions.add(true_preds);
String []true_labels = true_preds.split("\\s+");
for(int i=0;i<true_labels.length;i++)
{
Integer intLabel = Integer.parseInt(true_labels[i]);
if(!class_results.containsKey(intLabel))
{
class_results.put(intLabel,new ConfusionMatrix());
}
}
}
size_of_true_labels = class_results.size();
} catch (IOException ex) {
System.out.println("File not found: "+trueLabels + " or unable to read file");
System.out.println(ex.getMessage());
}finally{
try{
if (br2!=null){
br2.close();
}
}catch(IOException ex){
System.out.println(ex);
}
}
}
/**
* This function removes duplicates from an array of given labels. It is used while
* reading the file with the predicted labels.
*
* @param labels the array with the labels to be checked for duplicates
*/
public String[] removeDuplicates(String labels[])
{
TreeSet aset = new TreeSet();
aset.addAll(Arrays.asList(labels));
int num_of_labels = aset.size();
String finallabels[] = new String[num_of_labels];
Iterator iterator = aset.iterator();
int k=0;
while(iterator.hasNext())
{
finallabels[k++] = (String)iterator.next();
}
return finallabels;
}
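    /**
     * Evaluates the predicted labels in resultsFile against the true labels loaded with
     * loadTrueLabels, and prints the measures space separated in this order:
     * example-based accuracy, precision, recall and F-measure, then macro
     * precision/recall/F-measure, then micro precision/recall/F-measure
     * (with one labelled line per measure when verbose output is enabled).
     */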
public void evaluateTLExternal(String resultsFile)
{
BufferedReader br=null;
double accuracy=0.0;
double example_based_precision=0.0;
double example_based_recall=0.0;
double example_based_f = 0.0;
HashSet<Integer> labels_in_predictions = new HashSet<Integer>();
int row = 0;
try {
br = new BufferedReader(new FileReader(resultsFile));
String line;
while((line=br.readLine())!=null){
String predicted_values[] = line.split("\\s+");
predicted_values = removeDuplicates(predicted_values);
String tpres = (String)truePredictions.get(row);
String true_labels[] = tpres.split("\\s+");
double common_labels=0;
for(int k=0;k<true_labels.length;k++) // find the common labels
{
Integer trueLab = Integer.parseInt(true_labels[k]);
boolean foundLabel=false;
for(int j=0;j<predicted_values.length;j++)
{
Integer predLab = Integer.parseInt(predicted_values[j]);
if(predLab.intValue()==trueLab.intValue())
{
common_labels+=1.0;
foundLabel = true;
break;
}
}
if(!foundLabel) // this is for label based measures
increaseFN(trueLab);
}
// calculate label based measures
for(int j=0;j<predicted_values.length;j++)
{
Integer predLab = Integer.parseInt(predicted_values[j]);
labels_in_predictions.add(predLab);
boolean foundLabel=false;
for(int k=0;k<true_labels.length;k++)
{
Integer trueLab = Integer.parseInt(true_labels[k]);
if(trueLab.intValue()==predLab.intValue())
{
increaseTP(trueLab);
foundLabel = true;
break;
}
}
if(!foundLabel)
{
increaseFP(predLab);
}
}
accuracy+=common_labels/(double)(allLabels(true_labels,predicted_values));
example_based_precision += common_labels/(double)predicted_values.length;
example_based_recall += common_labels/(double)true_labels.length;
example_based_f += (2*common_labels/(double)(true_labels.length+predicted_values.length));
row++;
} // for each test instance
size_of_predicted_label = labels_in_predictions.size();
String output="";
output+= accuracy/(double)row+" ";
output+= example_based_precision/(double)row+" ";
output+= example_based_recall/(double)row +" ";
output+= example_based_f/(double)row+" ";
output+= macroPrecision()+" ";
output+=macroRecall()+" ";
output+=macroFmeasure()+" ";
output+=microPrecision()+" ";
output+=microRecall()+" ";
output+=microFmeasure();
System.out.print(output);
if(this.verbosity){
System.out.println("\nAccuracy: "+accuracy/(double)row);
System.out.println("EBP :"+example_based_precision/(double)row);
System.out.println("EBR :"+example_based_recall/(double)row);
System.out.println("EBF :"+example_based_f/(double)row);
System.out.println("MaP :"+macroPrecision());
System.out.println("MaR :"+macroRecall());
System.out.println("MaF :"+macroFmeasure());
System.out.println("MiP :"+microPrecision());
System.out.println("MiR :"+microRecall());
System.out.println("MiF :"+microFmeasure());
}
} catch (IOException ex) {
System.out.println("File not found: "+resultsFile + " or unable to read file");
System.out.println(ex.getMessage());
}catch(NumberFormatException exn){
System.out.println(exn);
System.out.println("Line: "+ row);
}finally{
try{
if (br!=null){
br.close();
}
}catch(IOException ex){
System.out.println(ex);
}
}
}
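    /**
     * Returns the size of the union of the two label lists; used as the denominator of
     * the example-based accuracy.
     */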
int allLabels(String list1[],String list2[])
{
HashSet<Integer> labels_per_instance = new HashSet<Integer>();
for(int i=0;i<list1.length;i++)
labels_per_instance.add(new Integer(Integer.parseInt(list1[i])));
for(int i=0;i<list2.length;i++)
labels_per_instance.add(new Integer(Integer.parseInt(list2[i])));
return labels_per_instance.size();
}
/**
* Describe parameters for calling the evaluation script
*/
private static void usage()
{
System.out.println("Usage: "+Evaluator.class.getName()+" goldendata systemanswers [-verbose]");
System.out.println("goldendata systemanswers are the files (golden and submitted respectively)");
System.out.println("verbose (optional) enables human readable output.");
}
public static void main(String args[])
{
// The main function to perform the evaluation of a multi-label classification task.
// args[0] holds the file with the true labels
// args[1] holds the file with the system's labels
// The format of the two files is as following:
//
// 145 4567 22213
// 234 5321 3456
// 123
// 123 125
// etc.
//
//
        // Each line holds the labels for the corresponding instance separated by a space
Options opt = new Options();
opt.addOption("verbose",false,"verbose output");
CommandLineParser parser = new PosixParser();
try {
CommandLine line = parser.parse(opt, args);
if(args.length<2)
{
usage();
System.exit(0);
}
Evaluator eval = new Evaluator();
if(line.hasOption("verbose")){
eval.setVerbosity(true);
}
eval.loadTrueLabels(args[0]);
eval.evaluateTLExternal(args[1]);
} catch (ParseException ex) {
Logger.getLogger(Evaluator.class.getName()).log(Level.SEVERE, null, ex);
}
}
/**
* @return the verbosity
*/
public boolean isVerbosity() {
return verbosity;
}
/**
* @param verbosity the verbosity to set
*/
public void setVerbosity(boolean verbosity) {
this.verbosity = verbosity;
}
}
| 16,349 | 31.248521 | 130 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/evaluation/EvaluatorTask1b.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package evaluation;
import data.Question;
import data.Task1bData;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
/** This script is called for BioASQ Task B evaluation, both phases.
*
* Example calls
* Phase A
* java -cp BioASQEvaluation2018.jar evaluation.EvaluatorTask1b -phaseA -e 5 "...\golden.json" "...\submission_PhasA.json" -verbose
* or java -cp BioASQEvaluation2018.jar evaluation.EvaluatorTask1b -phaseA -e 5 "...\golden.json" "...\submission_PhasA.json"
* Phase B
* java -cp BioASQEvaluation2018.jar evaluation.EvaluatorTask1b -phaseB -e 5 "...\golden.json" "...\submission_PhasB.json" -verbose
* or java -cp BioASQEvaluation2018.jar evaluation.EvaluatorTask1b -phaseB -e 5 "...\golden.json" "...\submission_PhasB.json"
*
* @author tasosnent
*/
public class EvaluatorTask1b {
Task1bData goldenData;
Task1bData systemResp;
double epsilon=0.00001;
// The same as in Task1bData
int VERSION_OF_CHALLENGE=8; // we use this to have modified versions of the measures for different BioASQ years
// Use version 2 for BioASQ1&2, version 3 for BioASQ3&4, version 5 since BioASQ5,version 8 since BioASQ8
public static final int BIOASQ2=2,BIOASQ3=3,BIOASQ5=5,BIOASQ8=8;
boolean verbosity = false;
/**
* Reads golden data and submission data from corresponding files
* @param golden golden file
* @param system submitted file, for evaluation
* @param version The version of the Challenge // Use version 2 for BioASQ1&2, version 3 for BioASQ3&4, version 5 since BioASQ5,version 8 since BioASQ8
*/
public EvaluatorTask1b(String golden, String system,int version)
{
this.setVERSION_OF_CHALLENGE(version);
//Golden data object
goldenData = new Task1bData(version, true);
        //System response object
systemResp = new Task1bData(version, false);
try {
goldenData.readData(golden);
systemResp.readData(system);
} catch (IOException ex) {
Logger.getLogger(EvaluatorTask1b.class.getName()).log(Level.SEVERE, null, ex);
}
}
    /**
     * Calculates the evaluation measures for Phase A and prints them space separated:
     * for each of concepts, documents, snippets and triples, the mean precision,
     * mean recall, mean F1, MAP and GMAP (labelled lines are added in verbose mode).
     */
public void EvaluatePhaseA()
{
// Question-level measures: An array with an evaluator object (with evaluation measures calculated) for each question of golden set
ArrayList<QuestionAnswerEvaluator> qevalArray = new ArrayList<QuestionAnswerEvaluator>();
// System.out.println("Golden data: "+goldenData.numQuestions());
// System.out.println("System replies: "+systemResp.numQuestions());
// For each question in golden data
for(int i=0;i<goldenData.numQuestions();i++)
{
Question gold = goldenData.getQuestion(i);
Question resp = systemResp.getQuestion(gold.getId());
if(resp==null)
continue;
// Create an evaluator for this pair
QuestionAnswerEvaluator qeval =new QuestionAnswerEvaluator(gold.getId(),this.VERSION_OF_CHALLENGE);
            // Calculate evaluation measures for phase A
qeval.calculateMeasuresForPair(gold, resp);
//put to qevalArray
qevalArray.add(qeval);
}
// Now, give the array with "question-level measures" to calculate "set-level measures" (averaging) for each type of answer items:
// concepts
System.out.print(
MeanPrecisionConcepts(qevalArray)+" "+
MeanRecallConcepts(qevalArray)+" "+
MeanF1Concepts(qevalArray)+" "+
MapConcepts(qevalArray)+" "+
GMapConcepts(qevalArray)+" ");
// articles
System.out.print(
MeanPrecisionArticles(qevalArray)+" "+
MeanRecallArticles(qevalArray)+" "+
MeanF1Articles(qevalArray)+" "+
MapDocuments(qevalArray)+" "+
GMapDocuments(qevalArray)+" ");
// snippets
System.out.print(
MeanPrecisionSnippets(qevalArray)+" "+
MeanRecallSnippets(qevalArray)+" "+
MeanF1Snippets(qevalArray)+" "+
MapSnippets(qevalArray)+" "+
GMapSnippets(qevalArray)+" ");
// Triples
System.out.print(
MeanPrecisionTriples(qevalArray)+" "+
MeanRecallTriples(qevalArray)+" "+
MeanF1Triples(qevalArray)+" "+
MapTriples(qevalArray)+" "+
GMapTriples(qevalArray));
if(this.verbosity){
System.out.println();
System.out.println("MPrec concepts: "+MeanPrecisionConcepts(qevalArray));
System.out.println("MRec concepts: "+MeanRecallConcepts(qevalArray));
System.out.println("MF1 concepts: "+MeanF1Concepts(qevalArray));
System.out.println("MAP concepts: "+MapConcepts(qevalArray));
System.out.println("GMAP concepts: "+GMapConcepts(qevalArray));
System.out.println("MPrec documents: "+MeanPrecisionArticles(qevalArray));
System.out.println("MRec documents: "+MeanRecallArticles(qevalArray));
System.out.println("MF1 documents: "+MeanF1Articles(qevalArray));
System.out.println("MAP documents: "+MapDocuments(qevalArray));
System.out.println("GMAP documents: "+GMapDocuments(qevalArray));
System.out.println("MPrec snippets: "+MeanPrecisionSnippets(qevalArray));
System.out.println("MRec snippets: "+MeanRecallSnippets(qevalArray));
System.out.println("MF1 snippets: "+MeanF1Snippets(qevalArray));
System.out.println("MAP snippets: "+MapSnippets(qevalArray));
System.out.println("GMAP snippets: "+GMapSnippets(qevalArray));
System.out.println("MPrec triples: "+MeanPrecisionTriples(qevalArray));
System.out.println("MRec triples: "+MeanRecallTriples(qevalArray));
System.out.println("MF1 triples: "+MeanF1Triples(qevalArray));
System.out.println("MAP triples: "+MapTriples(qevalArray));
System.out.println("GMAP triples: "+GMapTriples(qevalArray));
}
}
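    // Note: without -verbose, EvaluatePhaseA emits a single line of 20 space-separated values,
    // grouped as concepts, documents, snippets and triples, each contributing
    // MPrec, MRec, MF1, MAP and GMAP in that order.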
/**
* Calculate evaluation measures for Phase B
*/
public void EvaluatePhaseB()
{
// Question-level measures: An array with an evaluator object (with evaluation measures calculated) for each question of golden set
ArrayList<QuestionAnswerEvaluator> qevalArray = new ArrayList<QuestionAnswerEvaluator>();
// For each question in golden data
for(int i=0;i<goldenData.numQuestions();i++)
{
Question gold = goldenData.getQuestion(i);
Question resp = systemResp.getQuestion(gold.getId());
if(resp==null) continue;
// Create an evaluator for this pair
QuestionAnswerEvaluator qeval =new QuestionAnswerEvaluator(gold.getId(),
gold.getType(),this.VERSION_OF_CHALLENGE);
// Calculate evaluation measures for phase B
qeval.calculatePhaseBMeasuresForPair(gold, resp);
//put to qevalArray
qevalArray.add(qeval);
}
        // Now, use the array of question-level measures to calculate set-level measures (averaging)
System.out.print(
AccuracyExactAnswersYesNo(qevalArray)+" "
+strictAccuracy(qevalArray)+" "
+lenientAccuracy(qevalArray)+" "
+meanReciprocalRank(qevalArray)+" "
+listPrecision(qevalArray) +" "
+listRecall(qevalArray)+" "
+listF1(qevalArray)+" "
+macroF1ExactAnswersYesNo(qevalArray)+" "
+F1ExactAnswersYesNo(qevalArray,true)+" "
+F1ExactAnswersYesNo(qevalArray,false));
if(this.verbosity){
System.out.println();
System.out.println("YesNo Acc: "+AccuracyExactAnswersYesNo(qevalArray));
System.out.println("Factoid Strict Acc: "+strictAccuracy(qevalArray));
System.out.println("Factoid Lenient Acc: "+lenientAccuracy(qevalArray));
System.out.println("Factoid MRR: "+meanReciprocalRank(qevalArray));
System.out.println("List Prec: "+listPrecision(qevalArray));
System.out.println("List Rec: "+listRecall(qevalArray));
System.out.println("List F1: "+listF1(qevalArray));
System.out.println("YesNo macroF1: "+macroF1ExactAnswersYesNo(qevalArray));
System.out.println("YesNo F1 yes: "+F1ExactAnswersYesNo(qevalArray,true));
System.out.println("YesNo F1 no: "+F1ExactAnswersYesNo(qevalArray,false));
}
}
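    // Note: without -verbose, EvaluatePhaseB emits a single line of 10 space-separated values:
    // yes/no accuracy, factoid strict accuracy, lenient accuracy and MRR, list precision,
    // recall and F1, then yes/no macro F1, F1-yes and F1-no.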
/** Phase B Measures **/
/**
* Calculate Accuracy for YesNo questions
* @param qeval Object with question-level evaluation measures
* @return Accuracy for YesNo questions
*/
public double AccuracyExactAnswersYesNo(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0; // All Yes-No questions (Test-set size) : All Positive + All Negative [P+N]
double m=0; // All true predicted : true positive + true negative [TP + TN]
// For all questions in test-set
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.YESNO)
{
m+= qeval.get(i).getAccuracyYesNo();
k++;
}
}
if(k==0)
return 0;
return m/k;
}
/**
* Calculate F1 measure for YesNo questions
* @param qeval Object with question-level evaluation measures
* @param yes_label label for the F1 measure: true for label "yes", false for label "no"
* @return F1 measure for given label for YesNo questions F1yes for yes_label = true, F1no for yes_label = false
*/
public double F1ExactAnswersYesNo(ArrayList<QuestionAnswerEvaluator> qeval, boolean yes_label)
{
int k=0; // All Yes-No questions (Test-set size) : All Positive + All Negative [P+N]
// A confusion martix
ConfusionMatrix cm = new ConfusionMatrix();
// For all questions in test-set
for(int i=0;i<qeval.size();i++)
{
// If it is a yes-no question
if(qeval.get(i).getQuestion_type()==Question.YESNO)
{
if(qeval.get(i).is_yes == yes_label){
// it is a "Positive example" (either yes or no, depending on label given)
if(qeval.get(i).getAccuracyYesNo() == 1){ // If accurate prediction, increase True positive
cm.increaseTP();
                        } else { // Else, this positive example was predicted as negative: a false negative
                            cm.increaseFN();
}
} else {
// it is a "Negative example" (either yes or no, depending on label given)
if(qeval.get(i).getAccuracyYesNo() == 1){ // If accurate prediction, increase True negative
cm.increaseTN();
                        } else { // Else, this negative example was predicted as positive: a false positive
                            cm.increaseFP();
}
}
k++;
}
}
// F1 = 2TP / (2TP + FP + FN)
double a = 2*(double)cm.getTp(); // 2 TP
double b = (2*(double)cm.getTp() + (double)cm.getFp() + (double)cm.getFn()); // (2TP + FP + FN)
if(k==0 || b==0)
return 0; // No YesNo questions found or all of them belong to the other label and were correctly predicted (TN)
return a/b; // F1 = 2TP / (2TP + FP + FN)
}
/**
* Calculate macro averaged F1 measure for YesNo questions
* @param qeval Object with question-level evaluation measures
* @return macro averaged F1 measure for YesNo questions
*/
public double macroF1ExactAnswersYesNo(ArrayList<QuestionAnswerEvaluator> qeval){
// macroF1 = (F1yes + F1no) / 2
return (F1ExactAnswersYesNo(qeval, true) + F1ExactAnswersYesNo(qeval, false)) / 2;
}
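    /*
     * Worked example (hypothetical counts): if the "yes" label yields TP=3, FP=1, FN=2,
     * then F1yes = 2*3 / (2*3 + 1 + 2) = 6/9 ≈ 0.667; with F1no = 0.5 the macro-averaged
     * F1 is (0.667 + 0.5) / 2 ≈ 0.583.
     */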
/**
* Calculate strictAccuracy for factoid questions
* @param qeval Object with question-level evaluation measures
* @return strictAccuracy for factoid questions
*/
public double strictAccuracy(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0;
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.FACTOID)
{
m+= qeval.get(i).getStrictAccuracy();
k++;
}
}
if(k==0)
return 0;
return m/k;
}
/**
* Calculate lenientAccuracy for factoid questions
* @param qeval Object with question-level evaluation measures
* @return lenientAccuracy for factoid questions
*/
public double lenientAccuracy(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0;
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.FACTOID)
{
m+= qeval.get(i).getLenientAccuracy();
k++;
}
}
if(k==0)
return 0;
return m/k;
}
/**
* Calculate meanReciprocalRank for factoid questions
* @param qeval Object with question-level evaluation measures
* @return meanReciprocalRank for factoid questions
*/
public double meanReciprocalRank(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0;
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.FACTOID)
{
m+= qeval.get(i).getMRR();
k++;
}
}
if(k==0)
return 0;
return m/k;
}
/**
* Calculate Precision for list questions
* @param qeval Object with question-level evaluation measures
* @return Precision for list questions
*/
public double listPrecision(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0;
double pre=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.LIST)
{
if(Double.isNaN(qeval.get(i).getPrecisionEA()))
pre+=0;
else
pre+= qeval.get(i).getPrecisionEA();
k++;
}
}
if(k==0)
return 0;
return pre/k;
}
/**
* Calculate Recall for list questions
* @param qeval Object with question-level evaluation measures
* @return Recall for list questions
*/
public double listRecall(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0;
double recall=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.LIST)
{
if(Double.isNaN(qeval.get(i).getRecallEA()))
recall+=0;
else
recall+= qeval.get(i).getRecallEA();
k++;
}
}
if(k==0)
return 0;
return recall/k;
}
/**
* Calculate F1 for list questions
* @param qeval Object with question-level evaluation measures
* @return F1 for list questions
*/
public double listF1(ArrayList<QuestionAnswerEvaluator> qeval)
{
int k=0;
double f1=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).getQuestion_type()==Question.LIST)
{
//System.out.println(qeval.get(i).getF1EA());
if(Double.isNaN(qeval.get(i).getF1EA()))
f1+=0;
else
f1+=qeval.get(i).getF1EA();
k++;
}
}
if(k==0)
return 0;
return f1/k;
}
/** Phase A Measures **/
/**
* Calculate MAP for concepts
* @param qeval Object with question-level evaluation measures
* @return MAP for concepts
*/
public double MapConcepts(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).hasQuestionConcepts()){
if(Double.isNaN(qeval.get(i).getAveragePrecisionConcepts()))
m+=0;
else
m+=qeval.get(i).getAveragePrecisionConcepts();
sz++;
}
}
if(sz==0)
return 0;
return m/sz;
}
/**
* Calculate MeanPrecision for concepts
* @param qeval Object with question-level evaluation measures
* @return MeanPrecision for concepts
*/
public double MeanPrecisionConcepts(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).hasQuestionConcepts()){
if(Double.isNaN(qeval.get(i).getConceptsPrecision()))
m+=0;
else
m+=qeval.get(i).getConceptsPrecision();
sz++;
}
}
if(sz==0)
return 0;
return m/sz;
}
/**
* Calculate MeanRecall for concepts
* @param qeval Object with question-level evaluation measures
* @return MeanRecall for concepts
*/
public double MeanRecallConcepts(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).hasQuestionConcepts()){
if(Double.isNaN(qeval.get(i).getConceptsRecall()))
m+=0;
else
m+=qeval.get(i).getConceptsRecall();
sz++;
}
}
if(sz==0)
return 0;
return m/sz;
}
/**
* Calculate MeanF1 for concepts
* @param qeval Object with question-level evaluation measures
     * @return MeanF1 for concepts
*/
public double MeanF1Concepts(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).hasQuestionConcepts()){
if(Double.isNaN(qeval.get(i).getConceptsF1()))
m+=0;
else
m+=qeval.get(i).getConceptsF1();
sz++;
}
}
if(sz==0)
return 0;
return m/sz;
}
/**
* Calculate MeanPrecision for articles
* @param qeval Object with question-level evaluation measures
* @return MeanPrecision for articles
*/
public double MeanPrecisionArticles(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getArticlesPrecision()))
m+=0;
else
m+=qeval.get(i).getArticlesPrecision();
sz++;
}
if(sz==0)
return 0;
return m/sz;
}
/**
* Calculate MeanRecall for articles
* @param qeval Object with question-level evaluation measures
* @return MeanRecall for articles
*/
public double MeanRecallArticles(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getArticlesRecall()))
m+=0;
else
m+=qeval.get(i).getArticlesRecall();
sz++;
}
if(sz==0)
return 0;
        return m/sz;
}
/**
* Calculate MeanF1 for articles
* @param qeval Object with question-level evaluation measures
* @return MeanF1 for articles
*/
public double MeanF1Articles(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getArticlesF1()))
m+=0;
else
m+=qeval.get(i).getArticlesF1();
}
return m/qeval.size();
}
/**
* Calculate MeanPrecision for Snippets
* @param qeval Object with question-level evaluation measures
* @return MeanPrecision for Snippets
*/
public double MeanPrecisionSnippets(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
            if(Double.isNaN(qeval.get(i).getSnippetsPrecision())){
                m+=0;
                System.out.println("isnan");
            }
else
m+=qeval.get(i).getSnippetsPrecision();
}
return m/qeval.size();
}
/**
* Calculate MeanRecall for Snippets
* @param qeval Object with question-level evaluation measures
* @return MeanRecall for Snippets
*/
public double MeanRecallSnippets(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getSnippetsRecall())){
                m+=0;
                System.out.println("isnan");
}
else
m+=qeval.get(i).getSnippetsRecall();
}
return m/qeval.size();
}
/**
* Calculate MeanF1 for Snippets
* @param qeval Object with question-level evaluation measures
* @return MeanF1 for Snippets
*/
public double MeanF1Snippets(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getSnippetsF1()))
m+=0;
else
m+=qeval.get(i).getSnippetsF1();
}
return m/qeval.size();
}
/**
* Calculate MeanPrecision for Triples
* @param qeval Object with question-level evaluation measures
* @return MeanPrecision for Triples
*/
public double MeanPrecisionTriples(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int num=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).is_triple){
if(Double.isNaN(qeval.get(i).getTriplesPrecision()))
m+=0;
else
m+=qeval.get(i).getTriplesPrecision();
num++;
}
}
if(num==0)
return 0;
return m/(double)num;
}
/**
* Calculate MeanRecall for Triples
* @param qeval Object with question-level evaluation measures
* @return MeanRecall for Triples
*/
public double MeanRecallTriples(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int num=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).is_triple){
if(Double.isNaN(qeval.get(i).getTriplesRecall()))
m+=0;
else
m+=qeval.get(i).getTriplesRecall();
num++;
}
}
if(num==0)
return 0;
return m/(double)num;
}
/**
* Calculate MeanF1 for Triples
* @param qeval Object with question-level evaluation measures
* @return MeanF1 for Triples
*/
public double MeanF1Triples(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int num=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).is_triple){
if(Double.isNaN(qeval.get(i).getTriplesF1()))
m+=0;
else
m+=qeval.get(i).getTriplesF1();
num++;
}
}
if(num==0)
return 0;
return m/(double)num;
}
/**
* Calculate Map for Documents
* @param qeval Object with question-level evaluation measures
* @return Map for Documents
*/
public double MapDocuments(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getAveragePrecisionDocuments()))
m+=0;
else
m+=qeval.get(i).getAveragePrecisionDocuments();
}
return m/qeval.size();
}
/**
* Calculate Map for Triples
* @param qeval Object with question-level evaluation measures
* @return Map for Triples
*/
public double MapTriples(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int num = 0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).is_triple){
if(Double.isNaN(qeval.get(i).getAveragePrecisionTriples()))
m+=0;
else
m+=qeval.get(i).getAveragePrecisionTriples();
num++;
}
}
if(num==0)
return 0;
return m/num;
}
/**
     * Calculate MAP for Snippets
     * @param qeval Object with question-level evaluation measures
     * @return MAP for Snippets
*/
public double MapSnippets(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getAveragePrecisionSnippets()))
m+=0;
else
m+=qeval.get(i).getAveragePrecisionSnippets();
}
return m/qeval.size();
}
/**
* Calculate GMap for Concepts
* @param qeval Object with question-level evaluation measures
* @return GMap for Concepts
*/
public double GMapConcepts(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int sz=0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).hasQuestionConcepts())
{
if(Double.isNaN(qeval.get(i).getAveragePrecisionConcepts()))
m+=0;
else
m+=Math.log(qeval.get(i).getAveragePrecisionConcepts() + epsilon);
sz++;
}
}
if(sz==0)
return 0;
return Math.exp(m/sz);
}
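    /*
     * Note: GMAP is the geometric mean, exp( (1/n) * sum_i log(AP_i + epsilon) ).
     * Worked example (hypothetical values, epsilon neglected): for AP values 0.5 and 0.125,
     * GMAP = exp((log 0.5 + log 0.125) / 2) = sqrt(0.5 * 0.125) = 0.25.
     */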
/**
* Calculate GMap for Documents
* @param qeval Object with question-level evaluation measures
* @return GMap for Documents
*/
public double GMapDocuments(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m = 0;
double k=0;
for (int i = 0; i < qeval.size(); i++) {
/*if(qeval.get(i).getAveragePrecisionDocuments()==0.0)
{
System.out.println(qeval.get(i).getQuestionID());
}
System.out.println(qeval.get(i).getAveragePrecisionDocuments());*/
if (Double.isNaN(qeval.get(i).getAveragePrecisionDocuments())) {
m += Math.log(epsilon);
} else {
m += Math.log(qeval.get(i).getAveragePrecisionDocuments() + epsilon);
}
}
return Math.exp(m/qeval.size());
}
/**
* Calculate GMap for Triples
* @param qeval Object with question-level evaluation measures
* @return GMap for Triples
*/
public double GMapTriples(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
int num = 0;
for(int i=0;i<qeval.size();i++)
{
if(qeval.get(i).is_triple){
m+=Math.log(qeval.get(i).getAveragePrecisionTriples()+epsilon);
num++;
}
}
if(num==0)
return 0;
return Math.exp(m/num);
}
/**
* Calculate GMap for Snippets
* @param qeval Object with question-level evaluation measures
* @return GMap for Snippets
*/
public double GMapSnippets(ArrayList<QuestionAnswerEvaluator> qeval)
{
double m=0;
for(int i=0;i<qeval.size();i++)
{
if(Double.isNaN(qeval.get(i).getAveragePrecisionSnippets()))
m+=0;
else
m+=Math.log(qeval.get(i).getAveragePrecisionSnippets()+epsilon);
}
if(Double.isNaN(m))
return 0;
if(m==0)
return 0;
return Math.exp(m/qeval.size());
}
/**
* Options recognized for calling this script
* @return Options initialized object
*/
private static Options createOptions()
{
Options opt = new Options();
        opt.addOption("e", true, "edition of BioASQ challenge");
opt.addOption("phaseA",false,"phase A of Task B");
opt.addOption("phaseB",false,"phase B of Task B");
opt.addOption("verbose",false,"verbose output");
return opt;
}
/**
* Set the version of challenge
     * @param VERSION_OF_CHALLENGE the version of the challenge (2, 3, 5 or 8)
*/
private void setVERSION_OF_CHALLENGE(int VERSION_OF_CHALLENGE) {
this.VERSION_OF_CHALLENGE = VERSION_OF_CHALLENGE;
}
/**
* Set verbosity parameter
     * @param verbosity true to enable verbose (human readable) output
*/
public void setVerbosity(boolean verbosity) {
this.verbosity = verbosity;
}
/**
* Describe parameters for calling the evaluation script
*/
private static void usage()
{
System.out.println("Usage: -phaseX [-e version] goldenfile systemfile [-verbose]");
System.out.println("Where X can be either A or B for the corresponding phases,");
System.out.println("goldenfile systemfile are the files (golden and submitted respectively) ");
        System.out.println("and version of the challenge can be 2 (for BioASQ1&2), 3 (for BioASQ3&4), 5 (for BioASQ5,6&7) or 8 (for BioASQ8 and later). "
                + "This argument is optional - default value is 2.");
        System.out.println("-verbose, also optional, enables human readable output.");
}
/**
* Handle initial call of evaluation script, taking into account the parameters given.
* @param args
*/
public static void main(String args[])
{
Options opt = EvaluatorTask1b.createOptions();
CommandLineParser parser = new PosixParser();
try {
CommandLine line = parser.parse(opt, args);
String e;
EvaluatorTask1b eval;
if (!line.hasOption("phaseA") && !line.hasOption("phaseB")) {
EvaluatorTask1b.usage();
System.exit(0);
}
if (line.hasOption("e")) {
e = line.getOptionValue("e");
if (e == null) {
EvaluatorTask1b.usage();
System.exit(0);
}
eval = new EvaluatorTask1b(args[3], args[4],Integer.parseInt(e));
} else {
eval = new EvaluatorTask1b(args[1], args[2],EvaluatorTask1b.BIOASQ2);
}
if(line.hasOption("verbose")){
eval.setVerbosity(true);
}
if (line.hasOption("phaseA")) {
eval.EvaluatePhaseA();
}
if (line.hasOption("phaseB")) {
eval.EvaluatePhaseB();
}
} catch (ParseException ex) {
            Logger.getLogger(EvaluatorTask1b.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
| 32,682 | 32.76343 | 159 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/evaluation/QuestionAnswerEvaluator.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package evaluation;
import data.CalculatedMeasures;
import data.ExactAnswer;
import data.Question;
import data.Snippet;
import java.util.ArrayList;
/**
* A class with "question-level measures"
* An object of this class is used to represent each submitted question during test-set-level measure calculation.
* @author tasosnent
*/
public class QuestionAnswerEvaluator {
// Phase A question-level measures
CalculatedMeasures concepts;
CalculatedMeasures articles;
CalculatedMeasures triples;
CalculatedMeasures snippets;
// Phase B question-level measures
CalculatedMeasures exact_answers;
String question_id;
int question_type;
Boolean is_yes=false; // Flag: when true this question is a yesno question and its golden answer is "yes"
    Boolean is_triple=false; // Flag: when true this question has at least one golden triple
    Boolean has_concepts=false; // Flag: when true this question has at least one golden concept
int VERSION_OF_CHALLENGE; // Use version 2 for BioASQ1&2, version 3 for BioASQ3&4, version 5 since BioASQ5, version 8 since BioASQ8
/**
* Constructor for phase A
* @param id question id
* @param version the version of the challenge
*/
public QuestionAnswerEvaluator(String id,int version)
{
concepts= new CalculatedMeasures();
articles=new CalculatedMeasures();
triples=new CalculatedMeasures();
snippets=new CalculatedMeasures();
question_id = id;
VERSION_OF_CHALLENGE = version;
}
/**
* Constructor for phase B
* @param id question id
* @param qt question type
* @param version the version of the challenge
*/
public QuestionAnswerEvaluator(String id,int qt,int version)
{
exact_answers = new CalculatedMeasures();
question_id = id;
question_type = qt;
VERSION_OF_CHALLENGE = version;
}
/**
* Calculate Phase B question-level evaluation measures depending on the corresponding question type
* and update corresponding CalculatedMeasures object (i.e. exact_answers)
* @param golden golden question
* @param response submitted question
*/
public void calculatePhaseBMeasuresForPair(Question golden,Question response)
{
if(question_type == Question.FACTOID)
{
if(this.VERSION_OF_CHALLENGE == evaluation.EvaluatorTask1b.BIOASQ2){
strictAccuracy(golden.getExact_answer(),response.getExact_answer(),exact_answers);
lenientAccuracy(golden.getExact_answer(),response.getExact_answer(),exact_answers);
meanReciprocalRank(golden.getExact_answer(),response.getExact_answer(),exact_answers);
} // Since BioASQ3 up to five answers can be submitted for factoid questions
else if(this.VERSION_OF_CHALLENGE==evaluation.EvaluatorTask1b.BIOASQ3 || this.VERSION_OF_CHALLENGE==evaluation.EvaluatorTask1b.BIOASQ5 || this.VERSION_OF_CHALLENGE==evaluation.EvaluatorTask1b.BIOASQ8)
{
strictAccuracyForLists(golden.getExact_answer(),response.getExact_answer(),exact_answers);
lenientAccuracyForLists(golden.getExact_answer(),response.getExact_answer(),exact_answers);
meanReciprocalRankForLists(golden.getExact_answer(),response.getExact_answer(),exact_answers);
}
}
else if(question_type == Question.LIST)
{
calculatePRFforListQuestions(golden.getExact_answer(),response.getExact_answer(),exact_answers);
}
else if(question_type == Question.YESNO)
{
// find accuracy (i.e. if this is a true prediction - yes or no)
accuracyYesNo(golden.getExact_answer(),response.getExact_answer(),exact_answers);
// Also store the correct label - yes or no - for label based evaluation (F1-yes, F1-no and macro F1)
this.is_yes = golden.getExact_answer().getAnswer().equalsIgnoreCase("yes");
}
}
/**
* Calculate Phase A question-level evaluation measures
* and update corresponding CalculatedMeasures objects (i.e. articles, snippets etc)
     * for concepts and triples, questions without any golden concepts/triples are handled separately
* @param golden golden question
* @param response submitted question
*/
public void calculateMeasuresForPair(Question golden,Question response)
    {
        try{
            if(golden.getConcepts().size()>0 && !response.getConcepts().isEmpty())
            {
                calculatePRF(golden.getConcepts(), response.getConcepts(), concepts);
                has_concepts = true;
            }
        }
        catch(Exception ex){
            System.out.println(ex.toString());
            System.out.println(golden.getId());
        }
calculatePRF(golden.getDocuments(), response.getDocuments(), articles);
if(!golden.getTriples().isEmpty())
{
calculatePRF(golden.getTriples(), response.getTriples(), triples);
is_triple = true;
}
concatenateSnippets(golden.getSnippets());
concatenateSnippets(response.getSnippets());
calculatePRForSnippets(golden.getSnippets(), response.getSnippets(),snippets);
        // Why isn't the existence of concepts checked similarly to triples? (calculateAveragePrecision internally handles this case by assigning 0 average precision)
// TODO: add check for concept existence
// if(!golden.getConcepts().isEmpty())
calculateAveragePrecision(golden.getConcepts(), response.getConcepts(), concepts);
calculateAveragePrecision(golden.getDocuments(), response.getDocuments(), articles);
if(!golden.getTriples().isEmpty())
calculateAveragePrecision(golden.getTriples(), response.getTriples(), triples);
calculateAveragePrecisionSnippets(golden.getSnippets(), response.getSnippets(), snippets);
}
/** Phase A Measures **/
/**
* Calculate Precision, Recall and Fmeasure for snippets of this submission
* and update the cm object accordingly (i.e. snippets)
* @param listGolden golden snippets
* @param listResponses submitted snippets
* @param cm question-level measures object to store the results (i.e. snippets)
*/
public void calculatePRForSnippets(ArrayList<Snippet> listGolden, ArrayList<Snippet> listResponses, CalculatedMeasures cm)
{
if(listResponses.isEmpty())
{
return;
}
int resp_size=0;
int total_overlap=0;
int g_size=0;
int skippeddocs=0;
for(int i=0;i<listResponses.size();i++)
{
Snippet sn = listResponses.get(i);
/*if(listPubMedCentral.containsKey(sn.getDocumentOnlyID())){ // skip the documents that come from PubMedCentral
skippeddocs++; continue;
}*/
// if(sn.getSize()<0)
// {System.out.println(this.question_id);System.out.println(skippeddocs);System.exit(0);
// }
resp_size += sn.getSize();
int docsfound=0;
for(int j=0;j<listGolden.size();j++)
{
Snippet g = listGolden.get(j);
// if(listPubMedCentral.containsKey(g.getDocumentOnlyID())) // skip the documents that come from PubMedCentral
// continue;
if(sn.getDocumentOnlyID().equals(g.getDocumentOnlyID())) // we can have more than one snippet per document and per paragraph
{docsfound++;
total_overlap += sn.overlap(g);
}
}
// System.out.println("Docs found: "+docsfound +" question: "+this.question_id +" doc: "+sn.getDocument());
// System.out.println("Total overlap :" + total_overlap);
}
for(int j=0;j<listGolden.size();j++)
{
Snippet g = listGolden.get(j);
// if(listPubMedCentral.containsKey(g.getDocumentOnlyID())) // skip the documents that come from PubMedCentral
// continue;
g_size+=g.getSize();
}
// System.out.println("Total overlap :" + total_overlap +" Resp size: "+resp_size +" gold: "+g_size);
if(resp_size != 0)
cm.setPrecision((double)total_overlap/((double)resp_size));
if(g_size!=0)
cm.setRecall((double)total_overlap/(double)g_size);
if(cm.getPrecision()!=0 || cm.getRecall()!=0)
cm.setFmeasure(2*cm.getPrecision()*cm.getRecall()/(cm.getPrecision()+cm.getRecall()));
}
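    /*
     * Worked example (hypothetical character counts): if the submitted snippets cover 40
     * characters, the golden snippets cover 50, and 30 characters overlap, then
     * precision = 30/40 = 0.75, recall = 30/50 = 0.6 and F1 = 2*0.75*0.6/(0.75+0.6) ≈ 0.667.
     */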
/**
* Calculate Precision, Recall and Fmeasure for elements (except snippets: i.e. for documents, concepts or triples) of this submission
* and update the corresponding cm object accordingly
* @param listGolden golden elements (e.g. documents)
* @param listResponses submitted elements (e.g. documents)
* @param cm question-level measures object to store the results (e.g. articles)
*/
public void calculatePRF(ArrayList listGolden, ArrayList listResponses, CalculatedMeasures cm)
{
double tp=0,fp=0,fn=0;
if(listResponses.isEmpty())
{
return;
}
for(int i=0;i<listResponses.size();i++)
{
Object item = listResponses.get(i);
if(listGolden.contains(item))
tp++;
else
{
fp++;
}
}
for(int i=0;i<listGolden.size();i++)
{
Object item = listGolden.get(i);
if(!listResponses.contains(item))
fn++;
}
cm.setPrecision(tp/(tp+fp));
if((fn+tp)!=0)
cm.setRecall(tp/(tp+fn));
if(cm.getPrecision()!=0 && cm.getRecall()!=0)
cm.setFmeasure(2*cm.getPrecision()*cm.getRecall()/(cm.getPrecision()+cm.getRecall()));
}
/**
* Calculate Average Precision for this answer - list of elements (documents, concepts, triples - not snippets)
* and update the corresponding cm object accordingly
* @param listGolden golden elements (e.g. documents)
* @param listResponses submitted elements (e.g. documents)
* @param cm question-level measures object to store the results (e.g. articles)
*/
public void calculateAveragePrecision(ArrayList listGolden, ArrayList listResponses, CalculatedMeasures cm)
{
double ap=0;
for(int i=0;i<listResponses.size();i++)
{
ap+=precisionAtRfirstItems(i+1, listGolden, listResponses)*relevance(listResponses.get(i), listGolden);
}
        // If none of the response elements is correct, 0 is returned.
        // This also handles the case where the golden list is empty (i.e. correct responses will always be 0 in this case)
listResponses.retainAll(listGolden);
if(listResponses.isEmpty()){
cm.setAverage_precision(0);
return;
}
        // ** UPDATE 17/02/2015 : in BioASQ 3 we divide by 10. Please
        // check the guidelines **
if(VERSION_OF_CHALLENGE==EvaluatorTask1b.BIOASQ2)
// we should divide with the size of the golden list
cm.setAverage_precision(ap/(double)listGolden.size());
else if(VERSION_OF_CHALLENGE==EvaluatorTask1b.BIOASQ3 || this.VERSION_OF_CHALLENGE==evaluation.EvaluatorTask1b.BIOASQ5)
cm.setAverage_precision(ap/10.0);
else if(VERSION_OF_CHALLENGE==EvaluatorTask1b.BIOASQ8)
{cm.setAverage_precision(ap/ Math.min(10.0,(double)listGolden.size()));
}
}
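    /*
     * Worked example (hypothetical ranking): golden = {A, B}, response = [A, X, B].
     * The numerator is P@1*rel(A) + P@2*rel(X) + P@3*rel(B) = 1.0 + 0 + 2/3 ≈ 1.667,
     * which is then divided by |golden|, by 10, or by min(10, |golden|), depending on the
     * challenge version.
     */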
/**
* Calculate Precision at R first items for this answer - list of elements (documents, concepts etc)
* Used for MAP calculation
* @param r number of element to be taken into account
* @param listGolden golden elements (e.g. documents)
* @param listResponses submitted elements (e.g. documents)
* @return precision of submitted list taking into account r first elements submitted
*/
public double precisionAtRfirstItems(int r,ArrayList listGolden, ArrayList listResponses)
{
double tp=0,fp=0;
if(listResponses.isEmpty())
{
return 0;
}
for(int i=0;i<r;i++)
{
Object item = listResponses.get(i);
if(listGolden.contains(item))
tp++;
else
{
fp++;
}
}
if((tp+fp)==0)
return 0;
return tp/(tp+fp);
}
/**
* Calculate relevance of a submitted item (e.g. document or concept etc) i.e. if this document is included in the golden list
* Used for MAP calculation
* @param item The item to be checked for relevance
* @param listGolden the golden list to check if contains the item
* @return 1 if contained in golden list (i.e. is relevant) 0 if not
*/
public int relevance(Object item,ArrayList listGolden)
{
if(listGolden.contains(item))
return 1;
return 0;
}
/**
* Calculate Average Precision for this list of Snippets submitted
* and update the corresponding cm object accordingly
* @param listGolden golden Snippets
* @param listResponses submitted Snippets
* @param cm question-level measures object to store the results (i.e. snippets)
*/
public void calculateAveragePrecisionSnippets(ArrayList<Snippet> listGolden, ArrayList<Snippet> listResponses, CalculatedMeasures cm)
{
double ap=0;
for(int i=0;i<listResponses.size();i++)
{
ap+=precisionAtRSnippet(i+1, listGolden, listResponses)*relevanceSnippet(listResponses.get(i), listGolden);
}
        // ** UPDATE 17/02/2015 : in BioASQ 3 we divide by 10. Please
        // check the guidelines **
if(VERSION_OF_CHALLENGE==EvaluatorTask1b.BIOASQ2)
cm.setAverage_precision(ap/(double)listGolden.size());
else if(VERSION_OF_CHALLENGE==EvaluatorTask1b.BIOASQ3 || this.VERSION_OF_CHALLENGE==evaluation.EvaluatorTask1b.BIOASQ5)
cm.setAverage_precision(ap/10.0);
else if(VERSION_OF_CHALLENGE==EvaluatorTask1b.BIOASQ8)
{cm.setAverage_precision(ap/ Math.min(10.0,(double)listGolden.size()));
}
}
/**
* Calculate Precision at R first snippets of this list
* Used for MAP calculation
* @param r number of snippets to be taken into account
* @param listGolden golden snippets
* @param listResponses submitted snippets
* @return precision of submitted list taking into account r first snippets submitted
*/
public double precisionAtRSnippet(int r,ArrayList<Snippet> listGolden, ArrayList<Snippet> listResponses)
{
if(listResponses.isEmpty())
{
return 0;
}
int resp_size=0;
int total_overlap=0;
int g_size=0;
for(int i=0;i<r;i++)
{
Snippet sn = listResponses.get(i);
resp_size += sn.getSize();
for(int j=0;j<listGolden.size();j++)
{
Snippet g = listGolden.get(j);
if(sn.getDocument().equals(g.getDocument()))
{
total_overlap += sn.overlap(g);
}
}
}
for(int j=0;j<listGolden.size();j++)
{
Snippet g = listGolden.get(j);
g_size+=g.getSize();
}
return (double)total_overlap/((double)resp_size);
}
/**
* Calculate relevance of a submitted snippet i.e. if is included in the golden list
* Used for MAP calculation
* @param ret The Snippet to be checked for relevance
* @param listGolden the golden list to check if contains the snippet
* @return 1 if the snippet overlaps with one contained in golden list (i.e. is relevant) 0 if not
*/
private double relevanceSnippet(Snippet ret, ArrayList<Snippet> listGolden) {
for(int j=0;j<listGolden.size();j++)
{
Snippet g = listGolden.get(j);
if(ret.getDocument().equals(g.getDocument()))
{
                if(ret.overlap(g)!=0)
                    return 1;
}
}
return 0;
}
/**
* Concatenate all snippets in the provided list per document if they overlap.
* @param listsnip
*/
public void concatenateSnippets(ArrayList<Snippet> listsnip)
{
if(listsnip.isEmpty())
{
return;
}
for(int i=0;i<listsnip.size();i++)
{
for(int j=0;j<listsnip.size();j++)
{
if(j==i)
continue;
if(listsnip.get(i).getDocument().equals(listsnip.get(j).getDocument()))
{
if(listsnip.get(i).getFieldNameBegin().equals(listsnip.get(j).getFieldNameBegin())&&
listsnip.get(i).getFieldNameEnd().equals(listsnip.get(j).getFieldNameEnd()))
{
if(listsnip.get(i).itOverlaps(listsnip.get(j))) // merge snippets
{
Snippet merged = listsnip.get(i).merge(listsnip.get(j));
listsnip.remove(i);
listsnip.add(i, merged);
listsnip.remove(j);
// System.out.println("Merging "+listsnip.get(i).getDocument());
// System.out.println(merged.getBegin_index()+" "+merged.getEnd_index());
j=0;
}
}
}
}
}
}
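    /*
     * Example of the merging rule (hypothetical offsets): two snippets of the same document
     * and section spanning [10, 40] and [30, 60] overlap and are merged into a single snippet
     * (here assumed to span [10, 60]); disjoint snippets such as [10, 40] and [50, 60] are kept apart.
     */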
/** Phase B Measures **/
/**
* Calculate Precision, Recall and Fmeasure for this list question answer
* and update the corresponding cm object accordingly
* @param golden golden exact answer
* @param response submitted excact answer
* @param cm question-level measure object to store the results (i.e. exact_answers)
*/
public void calculatePRFforListQuestions(ExactAnswer golden,ExactAnswer response, CalculatedMeasures cm)
{
double tp=0,fp=0,fn=0;
if(response==null||response.getLists().isEmpty())
{
return;
}
for(int i=0;i<response.getLists().size();i++)
{
// check if the answer has a synonym
if(golden.containsAnswerSynonym(response.getLists().get(i),true))
{
tp++;
}
else
{
fp++;
}
}
for(int i=0;i<golden.getLists().size();i++)
{
if(!response.containsAnswerSynonym(golden.getLists().get(i),true))
fn++;
}
//System.out.println("TP: "+tp+" FP: "+fp +" FN: "+fn);
cm.setPrecision(tp/(tp+fp));
if((fn+tp)!=0)
cm.setRecall(tp/(tp+fn));
if(cm.getPrecision()!=0 && cm.getRecall()!=0)
cm.setFmeasure(2*cm.getPrecision()*cm.getRecall()/(cm.getPrecision()+cm.getRecall()));
}
/**Assign Accuracy for the specific submitted YesNo question
*
* @param exact_answer golden answer
* @param response submitted answer
* @param cm object to store measures
*/
private void accuracyYesNo(ExactAnswer exact_answer, ExactAnswer response,CalculatedMeasures cm) {
        if(response==null||response.getAnswer()==null||response.getAnswer().isEmpty())
{
cm.setAccuracy(0.0);return;
}
if(exact_answer.getAnswer().equals(response.getAnswer()))
cm.setAccuracy(1.0);
}
private void strictAccuracy(ExactAnswer gold_answer, ExactAnswer system_answer, CalculatedMeasures exact_answers) {
if(system_answer==null)
return;
ArrayList<String> answers_golden = gold_answer.getAnswers();
ArrayList<String> answers_system = system_answer.getAnswers();
if(answers_system.isEmpty()||answers_golden.isEmpty())
{
exact_answers.setStrict_accuracy(0.0);
return;
}
if(answers_system.get(0).equals(answers_golden.get(0)))
exact_answers.setStrict_accuracy(1.0);
}
private void strictAccuracyForLists(ExactAnswer gold_answer, ExactAnswer system_answer, CalculatedMeasures exact_answers) {
if(system_answer==null)
return;
ArrayList<ArrayList<String>> listsOfFactAnswers = system_answer.getLists();
//check for emptyness of list added
if(!listsOfFactAnswers.isEmpty() && gold_answer.containsAnswerSynonym(listsOfFactAnswers.get(0),false)){
exact_answers.setStrict_accuracy(1.0);
return;
}
exact_answers.setStrict_accuracy(0.0);
}
private void lenientAccuracyForLists(ExactAnswer gold_answer, ExactAnswer system_answer, CalculatedMeasures exact_answers) {
if(system_answer==null)
return;
ArrayList<ArrayList<String>> listsOfFactAnswers = system_answer.getLists();
for(ArrayList<String> ans_system : listsOfFactAnswers)
{
if(gold_answer.containsAnswerSynonym(ans_system,false)){
exact_answers.setLenient_accuracy(1.0);
return;
}
}
}
private void lenientAccuracy(ExactAnswer gold_answer, ExactAnswer system_answer, CalculatedMeasures exact_answers) {
if(system_answer==null)
return;
ArrayList<String> answers_golden = gold_answer.getAnswers();
ArrayList<String> answers_system = system_answer.getAnswers();
for(int i=0;i<answers_system.size();i++){
for(int j=0;j<answers_golden.size();j++){
if(answers_system.get(i).equals(answers_golden.get(j)))
{
exact_answers.setLenient_accuracy(1.0);
return;
}
}
}
}
private void meanReciprocalRank(ExactAnswer gold_answer, ExactAnswer system_answer, CalculatedMeasures exact_answers) {
if(system_answer==null)
return;
ArrayList<String> answers_golden = gold_answer.getAnswers();
ArrayList<String> answers_system = system_answer.getAnswers();
for(int i=0;i<answers_system.size();i++){
for(int j=0;j<answers_golden.size();j++){
if(answers_system.get(i).equals(answers_golden.get(j)))
{
exact_answers.setMean_reciprocal_rank(1.0/(double)(i+1));
//System.out.println(1.0/(double)(i+1));
return;
}
}
}
}
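    /*
     * Worked example (hypothetical answers): if the first matching golden answer appears at
     * rank 3 of the submitted list, the reciprocal rank stored here is 1.0/3 ≈ 0.333;
     * a correct top-ranked answer gives 1.0.
     */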
private void meanReciprocalRankForLists(ExactAnswer gold_answer, ExactAnswer system_answer, CalculatedMeasures exact_answers) {
if(system_answer==null)
return;
ArrayList<ArrayList<String>> listsOfFactAnswers = system_answer.getLists();
for(int i=0;i<listsOfFactAnswers.size();i++)
{
if(gold_answer.containsAnswerSynonym(listsOfFactAnswers.get(i),false)){
exact_answers.setMean_reciprocal_rank(1.0/(double)(i+1));
return;
}
}
}
public double getPrecisionEA()
{
return exact_answers.getPrecision();
}
public double getRecallEA()
{
return exact_answers.getRecall();
}
public double getF1EA()
{
return exact_answers.getFmeasure();
}
public double getAccuracyYesNo() {
return exact_answers.getAccuracy();
}
public double getStrictAccuracy()
{
return exact_answers.getStrict_accuracy();
}
public double getLenientAccuracy()
{
return exact_answers.getLenient_accuracy();
}
public double getMRR()
{
return exact_answers.getMean_reciprocal_rank();
}
// ** Get and Set methods **
/**
* Get question id
* @return question id
*/
public String getQuestionID()
{
return question_id;
}
public int getQuestion_type() {
return question_type;
}
public double getConceptsPrecision()
{
return concepts.getPrecision();
}
public double getConceptsRecall()
{
return concepts.getRecall();
}
public double getConceptsF1()
{
return concepts.getFmeasure();
}
public double getArticlesPrecision()
{
return articles.getPrecision();
}
public double getArticlesRecall()
{
return articles.getRecall();
}
public double getArticlesF1()
{
return articles.getFmeasure();
}
public double getSnippetsPrecision()
{
return snippets.getPrecision();
}
public double getSnippetsRecall()
{
return snippets.getRecall();
}
public double getSnippetsF1()
{
return snippets.getFmeasure();
}
public double getTriplesPrecision()
{
return triples.getPrecision();
}
public double getTriplesRecall()
{
return triples.getRecall();
}
public double getTriplesF1()
{
return triples.getFmeasure();
}
public boolean hasQuestionConcepts()
{
return has_concepts;
}
public double getAveragePrecisionConcepts()
{
return concepts.getAverage_precision();
}
public double getAveragePrecisionDocuments()
{
return articles.getAverage_precision();
}
public double getAveragePrecisionTriples()
{
return triples.getAverage_precision();
}
public double getAveragePrecisionSnippets()
{
return snippets.getAverage_precision();
}
public double getF1Snippets()
{
return snippets.getFmeasure();
}
}
| 27,544 | 34.772727 | 212 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/stats/WilcoxonSignedRanksTest.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package stats;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.logging.Level;
import java.util.logging.Logger;
public class WilcoxonSignedRanksTest {
private double MIN=-1.0;
public WilcoxonSignedRanksTest()
{
}
public void test()
{
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
String s;
int i=0;
double values1[] = new double[6];
double values2[] = new double[6];
try {
while ((s = in.readLine()) != null && s.length() != 0){
String values[] = s.split("\\s+");
values1[i] = Double.parseDouble(values[0]);
values2[i] = Double.parseDouble(values[1]);
i++;
//System.out.println(s);
}
performTest(values1, values2);
} catch (IOException ex) {
Logger.getLogger(WilcoxonSignedRanksTest.class.getName()).log(Level.SEVERE, null, ex);
}
}
public int [] nextMinIndexes(double []acc,double min)
{
int [] ids=new int[acc.length];
int j=0;
double m=1.0;
for(int i=0;i<acc.length;i++)
acc[i] = Math.abs(acc[i]);
// Find the min
for(int i=0;i<acc.length;i++)
if(acc[i]>min && m>acc[i])
m = acc[i];
for(int i=0;i<acc.length;i++)
if(Math.abs(acc[i])==m)
{
ids[j]=i;
j++;
}
int [] rids = new int[j];
for(int i=0;i<j;i++)
{
rids[i] = ids[i];
}
MIN = acc[rids[0]];
//System.exit(0);
return rids;
}
public double [] performTest(double [] m1, double [] m2)
{
int l = m1.length;
int l2 = m2.length;
if(l != l2)
{
            System.out.println("The arrays are not of equal size");
System.exit(0);
}
int max_rank = l;
int rank=1;
int [] ids = new int[l];
double mean_rank;
double [] dif = new double[l];
double [] ranks = new double[l];
for(int i=0;i<l;i++)
dif[i] = m1[i]-m2[i];
// System.out.println(Arrays.toString(dif));
while(rank<=max_rank)
{
ids = nextMinIndexes(dif.clone(),MIN);
mean_rank = 0.0;
for(int j=0;j<ids.length;j++)
{
mean_rank = mean_rank + rank;
rank+=1;
}
mean_rank/=ids.length;
for(int j=0;j<ids.length;j++)
{
mean_rank = Math.round(mean_rank*1000)/1000.0;
ranks[ids[j]]=mean_rank;
}
}
double rminus=0.0;
double rpos = 0.0;
double rzero = 0.0;
double rankzero = 0;
int countzeros=0;
for(int i=0;i<l;i++)
{
if(dif[i]<0.0)
rminus+=ranks[i];
else if(dif[i]>0.0)
rpos+=ranks[i];
else
{
rankzero = ranks[i];
rzero += ranks[i];
countzeros+=1;
}
}
if(countzeros%2!=0)
rzero-=rankzero;
rpos+=rzero/2.0;
rminus+=rzero/2.0;
System.out.println("R+: "+rpos+" R-: "+rminus);
System.out.println("T = "+Math.min(rpos,rminus));
return ranks;
}
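    /**
     * Minimal usage sketch with hypothetical paired scores (e.g. MAP of two systems on six
     * test batches); performTest prints R+, R- and the statistic T = min(R+, R-).
     */
    public static void exampleUsage() {
        double[] systemA = {0.42, 0.35, 0.51, 0.48, 0.39, 0.44};
        double[] systemB = {0.40, 0.36, 0.45, 0.48, 0.33, 0.41};
        new WilcoxonSignedRanksTest().performTest(systemA, systemB);
    }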
public static void main(String args[])
{
WilcoxonSignedRanksTest stest = new WilcoxonSignedRanksTest();
stest.test();
}
}
| 3,946 | 20.451087 | 98 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/tools/DrawStatisticsForPubMedData.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package tools;
import com.google.gson.stream.JsonReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashSet;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
public class DrawStatisticsForPubMedData {
HashSet journalList;
int numeOfArticles=0;
double labelsPerArticle=0.0;
HashSet labelsList;
double labelDensity=0;
HashSet pmids;
public DrawStatisticsForPubMedData()
{
journalList = new HashSet();
labelsList = new HashSet();
pmids = new HashSet();
}
public void parseStream(String jsonFile,String listOfJournals) throws IOException {
String journalName;
int count = 0;
int abstract_count=0;
int duplicates = 0;
try {
JsonReader reader = new JsonReader(new InputStreamReader(new FileInputStream(jsonFile)));
reader.setLenient(true);
reader.beginObject();
reader.skipValue();
//System.out.println(nam);
reader.beginArray();
while (reader.hasNext()) {
reader.beginObject();
this.numeOfArticles++;
while (reader.hasNext()) {
String name = reader.nextName();
if (name.equals("abstractText")) {
abstract_count++;
reader.skipValue();
} else if (name.equals("journal")) {
journalName = reader.nextString();
journalList.add(journalName);
} else if (name.equals("meshMajor")) {
int num_labels = readLabelsArray(reader);
count+=num_labels;
labelDensity += (double)num_labels/26563.0;
} else if (name.equals("pmid")) {
int pmid = reader.nextInt();
if(!pmids.contains(pmid))
pmids.add(pmid);
else
duplicates++;
} else if (name.equals("title")){
reader.skipValue();
}
else if (name.equals("year")){
reader.skipValue();
}
else{
System.out.println(name);
reader.skipValue();
}
}
reader.endObject();
}
reader.endArray();
System.out.println("Abstracts: "+abstract_count);
System.out.println("Duplicates: "+duplicates);
labelsPerArticle = (double)count/(double)numeOfArticles;
labelDensity = labelDensity/(double)numeOfArticles;
exportListOfJournals(listOfJournals);
printStatistics();
} catch (Exception ex) {
System.out.println("Abstracts: "+abstract_count);
System.out.println("Duplicates: "+duplicates);
labelsPerArticle = (double)count/(double)numeOfArticles;
labelDensity = labelDensity/(double)numeOfArticles;
exportListOfJournals(listOfJournals);
printStatistics();
Logger.getLogger(DrawStatisticsForPubMedData.class.getName()).log(Level.SEVERE, null, ex);
}
}
public int readLabelsArray(JsonReader reader){
int count=0;
try{
reader.beginArray();
while (reader.hasNext()) {
String nextString = reader.nextString();
labelsList.add(nextString);
count++;
}
reader.endArray();
}catch(IOException ex){}
return count;
}
private void exportListOfJournals(String listOfJournals) {
BufferedWriter bw=null;
try {
bw = new BufferedWriter(new FileWriter(listOfJournals));
Iterator iter = journalList.iterator();
while(iter.hasNext())
{
String jour = (String)iter.next();
bw.write(jour+"\n");
}
bw.close();
} catch (IOException ex) {
Logger.getLogger(DrawStatisticsForPubMedData.class.getName()).log(Level.SEVERE, null, ex);
}
}
private void printStatistics() {
System.out.println("Number of articles: "+numeOfArticles);
System.out.println("Labels per article: "+labelsPerArticle);
System.out.println("Number of labels: "+labelsList.size());
System.out.println("Density: "+labelDensity);
}
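    /*
     * Example of the reported statistics (hypothetical numbers): for 1,000 articles carrying
     * 12,700 MeSH labels in total, "Labels per article" is 12.7 and "Density" is that figure
     * divided by the constant 26563.0 used in parseStream, i.e. roughly 4.8e-4.
     */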
public static void main(String args[])
{
DrawStatisticsForPubMedData ds = new DrawStatisticsForPubMedData();
try {
ds.parseStream(args[0], args[1]);
} catch (IOException ex) {
Logger.getLogger(DrawStatisticsForPubMedData.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
| 6,069 | 32.351648 | 102 |
java
|
biobert-pytorch
|
biobert-pytorch-master/question-answering/scripts/bioasq_eval/flat/BioASQEvaluation/src/tools/MathUtils.java
|
/*
* Copyright 2013,2014 BioASQ project: FP7/2007-2013, ICT-2011.4.4(d),
* Intelligent Information Management,
* Targeted Competition Framework grant agreement n° 318652.
* www: http://www.bioasq.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @author Ioannis Partalas
*/
package tools;
import java.util.ArrayList;
public class MathUtils{
public static double SMALL = 1e-6;
private MathUtils(){};
public static double average(double values[])
{
int s = values.length;
if(s==0)
return Double.MIN_VALUE;
double avg = 0.0;
for(int i=0;i<s;i++)
avg+=values[i];
return avg/s;
}
public static double max( double[] arr ) {
double max = -Double.MAX_VALUE;
int length = arr.length;
for (int i=0; i < length; ++i)
if (arr[i] > max)
max = arr[i];
return max;
}
public static double min( double[] arr ) {
double min = Double.MAX_VALUE;
int length = arr.length;
for (int i=0; i < length; ++i)
if (arr[i] < min)
min = arr[i];
return min;
}
public static int min( int[] arr ) {
int min = Integer.MAX_VALUE;
int length = arr.length;
for (int i=0; i < length; ++i)
if (arr[i] < min)
min = arr[i];
return min;
}
public static int min( ArrayList<Integer> arr ) {
int min = Integer.MAX_VALUE;
int length = arr.size();
for (int i=0; i < length; ++i)
if (arr.get(i).intValue() < min)
min = arr.get(i).intValue();
return min;
}
/**
* Returns the range of the data in the specified array.
* Range is the difference between the maximum and minimum
* values in the data set.
*
* @param arr An array of sample data values.
* @return The range of the data in the input array.
**/
public static double range(double[] arr) {
return max(arr) - min(arr);
}
/**
* Returns the root mean square of an array of sample data.
*
* @param arr An array of sample data values.
* @return The root mean square of the sample data.
**/
public static double rms( double[] arr ) {
int size = arr.length;
double sum = 0;
for (int i=0; i < size; ++i)
sum += arr[i]*arr[i];
return Math.sqrt(sum/size);
}
public static double variance(double[] arr) {
int n = arr.length;
if (n < 2)
return 0;
//throw new IllegalArgumentException("Must be at least 2 elements in array.");
// 1st get the average of the data.
double ave = average(arr);
double var = 0;
double ep = 0;
for (int i=0; i < n; ++i) {
double s = arr[i] - ave;
ep += s;
var += s*s;
}
var = (var - ep*ep/n)/(n-1);
return var;
}
public static double variance(double[] arr,double average) {
int n = arr.length;
if (n < 2)
return 0;
//throw new IllegalArgumentException("Must be at least 2 elements in array.");
// 1st get the average of the data.
double ave = average;
double var = 0;
double ep = 0;
for (int i=0; i < n; ++i) {
double s = arr[i] - ave;
ep += s;
var += s*s;
}
var = (var - ep*ep/n)/(n-1);
return var;
}
/**
* Returns the standard deviation of an array of sample data.
*
* @param arr An array of sample data values.
* @return The standard deviation of the sample data.
**/
public static double sdev(double[] arr) {
return Math.sqrt(variance(arr));
}
public static double dif(double Dx, double Dy)
{
return Dx/Dy;
}
public static int indexOf(double[] values,double val)
{
for (int i = 0; i < values.length; i++) {
if(values[i]==val)
return i;
}
return -1;
}
public static int indexOf(int[] values,int val)
{
for (int i = 0; i < values.length; i++) {
if(values[i]==val)
return i;
}
return -1;
}
public static /*@pure@*/ int maxIndex(double[] doubles) {
double maximum = 0;
int maxIndex = 0;
for (int i = 0; i < doubles.length; i++) {
if ((i == 0) || (doubles[i] > maximum)) {
maxIndex = i;
maximum = doubles[i];
}
}
return maxIndex;
}
public static /*@pure@*/ int maxIndex(int[] ints) {
int maximum = 0;
int maxIndex = 0;
for (int i = 0; i < ints.length; i++) {
if ((i == 0) || (ints[i] > maximum)) {
maxIndex = i;
maximum = ints[i];
}
}
return maxIndex;
}
public static double sum(double[] sim_values) {
double sum=0.0;
for(int i=0;i<sim_values.length;i++)
sum+=sim_values[i];
return sum;
}
public static void normalize(double[] doubles, double sum) {
if (Double.isNaN(sum)) {
throw new IllegalArgumentException("Can't normalize array. Sum is NaN.");
}
if (sum == 0) {
// Maybe this should just be a return.
throw new IllegalArgumentException("Can't normalize array. Sum is zero.");
}
for (int i = 0; i < doubles.length; i++) {
doubles[i] /= sum;
}
}
public static /*@pure@*/ int[] stableSort(double[] array){
int[] index = new int[array.length];
int[] newIndex = new int[array.length];
int[] helpIndex;
int numEqual;
array = (double[])array.clone();
for (int i = 0; i < index.length; i++) {
index[i] = i;
if (Double.isNaN(array[i])) {
array[i] = Double.MAX_VALUE;
}
}
quickSort(array,index,0,array.length-1);
// Make sort stable
int i = 0;
while (i < index.length) {
numEqual = 1;
for (int j = i+1; ((j < index.length) && eq(array[index[i]],array[index[j]])); j++)
numEqual++;
if (numEqual > 1) {
helpIndex = new int[numEqual];
for (int j = 0; j < numEqual; j++)
helpIndex[j] = i+j;
quickSort(index, helpIndex, 0, numEqual-1);
for (int j = 0; j < numEqual; j++)
newIndex[i+j] = index[helpIndex[j]];
i += numEqual;
} else {
newIndex[i] = index[i];
i++;
}
}
return newIndex;
}
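    /*
     * Example (hypothetical input): stableSort(new double[]{0.3, 0.1, 0.3, 0.2}) returns the
     * index permutation {1, 3, 0, 2}, i.e. positions sorted by ascending value with ties kept
     * in their original order.
     */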
public static /*@pure@*/ boolean eq(double a, double b){
return (a - b < SMALL) && (b - a < SMALL);
}
/**
* Implements quicksort according to Manber's "Introduction to
* Algorithms".
*
* @param array the array of integers to be sorted
* @param index the index into the array of integers
* @param left the first index of the subset to be sorted
* @param right the last index of the subset to be sorted
*/
//@ requires 0 <= first && first <= right && right < array.length;
//@ requires (\forall int i; 0 <= i && i < index.length; 0 <= index[i] && index[i] < array.length);
//@ requires array != index;
// assignable index;
private static void quickSort(/*@non_null@*/ int[] array, /*@non_null@*/ int[] index, int left, int right) {
if (left < right) {
int middle = partition(array, index, left, right);
quickSort(array, index, left, middle);
quickSort(array, index, middle + 1, right);
}
}
/**
* Implements quicksort according to Manber's "Introduction to
* Algorithms".
*
* @param array the array of doubles to be sorted
* @param index the index into the array of doubles
* @param left the first index of the subset to be sorted
* @param right the last index of the subset to be sorted
*/
//@ requires 0 <= first && first <= right && right < array.length;
//@ requires (\forall int i; 0 <= i && i < index.length; 0 <= index[i] && index[i] < array.length);
//@ requires array != index;
// assignable index;
private static void quickSort(/*@non_null@*/ double[] array, /*@non_null@*/ int[] index, int left, int right) {
if (left < right) {
int middle = partition(array, index, left, right);
quickSort(array, index, left, middle);
quickSort(array, index, middle + 1, right);
}
}
/**
* Partitions the instances around a pivot. Used by quicksort and
* kthSmallestValue.
*
* @param array the array of doubles to be sorted
* @param index the index into the array of doubles
* @param l the first index of the subset
* @param r the last index of the subset
*
* @return the index of the middle element
*/
private static int partition(double[] array, int[] index, int l, int r) {
double pivot = array[index[(l + r) / 2]];
int help;
while (l < r) {
while ((array[index[l]] < pivot) && (l < r)) {
l++;
}
while ((array[index[r]] > pivot) && (l < r)) {
r--;
}
if (l < r) {
help = index[l];
index[l] = index[r];
index[r] = help;
l++;
r--;
}
}
if ((l == r) && (array[index[r]] > pivot)) {
r--;
}
return r;
}
/**
 * Partitions the instances around a pivot; integer version of the method above.
 *
 * @param array the array of integers to be sorted
 * @param index the index into the array of integers
 * @param l the first index of the subset
 * @param r the last index of the subset
 *
 * @return the index of the middle element
 */
private static int partition(int[] array, int[] index, int l, int r) {
double pivot = array[index[(l + r) / 2]];
int help;
while (l < r) {
while ((array[index[l]] < pivot) && (l < r)) {
l++;
}
while ((array[index[r]] > pivot) && (l < r)) {
r--;
}
if (l < r) {
help = index[l];
index[l] = index[r];
index[r] = help;
l++;
r--;
}
}
if ((l == r) && (array[index[r]] > pivot)) {
r--;
}
return r;
}
/**
 * Rounds a double to the nearest integer, rounding halves away from zero.
 */
public static /*@pure@*/ int round(double value) {
int roundedValue = value > 0
? (int)(value + 0.5)
: -(int)(Math.abs(value) + 0.5);
return roundedValue;
}
}
| 11,015 | 26.402985 | 115 | java |
| null | aspiration-study-main/code/org/sas/benchmark/req/spo/AutoRun.java |
package org.sas.benchmark.req.spo;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigDecimal;
import org.femosaa.core.SASAlgorithmAdaptor;
import org.femosaa.seed.FixedSeeder;
import org.ssase.requirement.froas.RequirementPrimitive;
import org.ssase.requirement.froas.RequirementProposition;
import org.ssase.requirement.froas.SimpleRequirementProposition;
import org.ssase.util.Repository;
/**
*
* SS-N u=0.0005
* DiatomSizeReduction u=0.05
* Coffee u=0.001
*
* *SS-M m=500, pop=10(50), gen=500, no seed
* *SS-N m=1500, pop=50, gen=500, all seed
* *SS-K m=500, pop=50, gen=500, no seed
* *SS-J m=700, pop=30, gen=500, no seed
* *Adiac m=700, pop=50, gen=500, no seed (m=1000, pop=30, gen=500, no seed)
* *DiatomSizeReduction - m=500, pop=60, gen=500, no seed
* *ShapesAll - m=500, pop=60, gen=500, no seed
* *xgboost4096 m=300, pop=30, gen=500, no seed
*
* CONEX m=1000, pop=10, gen=500, all seed
* feature6 m=300, pop=10, gen=500, no seed
*
*
* feature8
*
*
* LSTM -
* @author
*
*/
public class AutoRun {
/*
* new double[] { 500, 1000, 1500, 2000, 2500 }; new double[] { 0.1, 0.8, 5,
* 15, 40}; new double[] { 2000, 3300, 5000, 10000, 17000}; new double[] {
* 180, 220, 230, 250, 280}; new double[] { 11, 13, 14.5, 15.5, 18}; new
* double[] { 230, 400, 600, 800, 1000};
*/
// private static double[] ds = new double[] { 2000, 3300, 5000, 10000,
// 17000};
//private static String[] weights = new String[] { "1.0-0.0", "0.0-1.0" };
//private static String[] single_algs = new String[] { "ga" };
//private static String[] d_pair = new String[] { "0.1-0.1","0.1-0.1","0.1-0.1" };
public static String[] propositions = new String[] { "p0-p0","p0-p1","p1-p0","p0-p2","p2-p0","p0-p3","p3-p0","p1-p1","p2-p2","p3-p3","p1-p2","p2-p1","p1-p3","p3-p1","p2-p3","p3-p2"};
//private static String[] d_pair = new String[] { "0.1-0.1" };
//private static String[] propositions = new String[] { "p1-p1","p2-p2","p3-p3","p1-p2","p2-p1","p1-p3","p3-p1","p2-p3","p3-p2" };
public static String[] multi_algs = new String[] { /*"nsgaii", "ibea",*/ "moead" };
private static String benchmark = "Adiac";
public static void main(String[] args) {
// double l = Double.MAX_VALUE/0.001;
// 3.0,48.89200000000001 3.0,33.305
// System.out.print(l > 1);
// if(1==1) return;
/*if(benchmark.equals("SS-N")) {
Parser.u_threshold = 0.00001;
} else if(benchmark.equals("DiatomSizeReduction")) {
Parser.u_threshold = 0.05;
} else if(benchmark.equals("Coffee")) {
Parser.u_threshold = 0.001;
} else if(benchmark.equals("CONEX")) {
Parser.u_threshold = 0.01;
} else if(benchmark.equals("Adiac")) {
Parser.u_threshold = 0.001;
} else if(benchmark.equals("ShapesAll")) {
Parser.u_threshold = 0.001;
} else if(benchmark.equals("Wafer")) {
Parser.u_threshold = 0.001;
} else if(benchmark.equals("xgboost4096")) {
Parser.u_threshold = 0.01;
} else if(benchmark.equals("LSTM")) {
Parser.u_threshold = 0.01;
} else if(benchmark.equals("SS-L")) {
Parser.u_threshold = 0.01;
} else if(benchmark.equals("SS-C")) {
Parser.u_threshold = 0.001;
} else if(benchmark.equals("SS-O")) {
Parser.u_threshold = 0.01;
}*/
Parser.selected = benchmark;
Simulator.setup();
SASAlgorithmAdaptor.isFuzzy = true;
SASAlgorithmAdaptor.isSeedSolution = false;
boolean runUnrealistic = true;
//SASAlgorithmAdaptor.logGenerationOfObjectiveValue = 100;
String[] d_pair = Parser.d_values;
for (String alg : multi_algs) {
for (String p : propositions) {
for (int i = 0; i <d_pair.length;i++) {
// p0-p0 only runs once
if(p.equals("p0-p0") && i == 1) {
break;
}
if(!runUnrealistic && p.equals("p0-p0")) {
break;
}
// unrealistic aspiration is only applied in the case where all objectives have an aspiration level
if(p.contains("0") && i == 3) {
continue;
}
if(!runUnrealistic && i == 3) {
continue;
}
/*if(i != 3) {
continue;
}*/
File f = new File("/Users/"+System.getProperty("user.name")+"/research/monitor/ws-soa/sas");
try {
if (f.exists()) {
delete(f);
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
RequirementProposition[] rp = getProposition(p,d_pair[i]);
// Will reset so no need to remove
Repository.setRequirementProposition("sas-rubis_software-P1", rp[0]);
Repository.setRequirementProposition("sas-rubis_software-P2", rp[1]);
//System.out.print( d_pair[i]+"\n");
run_MOEA(alg, p, d_pair[i]);
}
}
}
}
public static void run_MOEA(String alg, String p, String d) {
Simulator.alg = alg;
Simulator.main_test();
if(p.equals("p0-p0")) {
d = "0,0";
}
File source = new File("/Users/"+System.getProperty("user.name")+"/research/monitor/ws-soa/sas");
File r = new File(
"/Users/"+System.getProperty("user.name")+"/research/experiments-data/req-vs-mo/configuration-optimization/"
+ "/" + benchmark + "/" + alg + "/" + p + "/" + d + "/" + "/sas");
File dest = new File(
"/Users/"+System.getProperty("user.name")+"/research/experiments-data/req-vs-mo/configuration-optimization/"
+ "/" + benchmark + "/" + alg + "/" + p + "/" + d + "/" + "/sas");
if (r.exists()) {
System.out.print("Remove " + r + "\n");
try {
delete(r);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
if (!dest.exists()) {
dest.mkdirs();
}
try {
copyFolder(source, dest);
if (source.exists()) {
System.out.print("Remove " + source + "\n");
delete(source);
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
System.out
.print("End of "
+ "/Users/"+System.getProperty("user.name")+"/research/experiments-data/req-vs-mo/configuration-optimization/"
+ "/" + benchmark + "/" + alg + "/" + p + "/" + d + "/" + "/sas" + "\n");
File f = new File("/Users/"+System.getProperty("user.name")+"/research/monitor/ws-soa/sas");
try {
if (f.exists()) {
delete(f);
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public static RequirementProposition[] getProposition(String p, String d) {
String p1 = p.split("-")[0];
String p2 = p.split("-")[1];
String d1 = d.split(",")[0];
String d2 = d.split(",")[1];
RequirementProposition[] rp = new RequirementProposition[2];
//System.out.print(d1 + "**\n");
if("p0".equals(p1)) {
rp[0] =
new SimpleRequirementProposition(RequirementPrimitive.AS_GOOD_AS_POSSIBLE);
} else if("p1".equals(p1)) {
rp[0] =
new SimpleRequirementProposition(new BigDecimal(d1).doubleValue(), RequirementPrimitive.AS_GOOD_AS_POSSIBLE_TO_d);
} else if("p2".equals(p1)) {
rp[0] =
new SimpleRequirementProposition(new BigDecimal(d1).doubleValue(), RequirementPrimitive.BETTER_THAN_d);
} else if("p3".equals(p1)) {
rp[0] =
new SimpleRequirementProposition(new BigDecimal(d1).doubleValue(), RequirementPrimitive.AS_GOOD_AS_POSSIBLE,
RequirementPrimitive.BETTER_THAN_d);
}
if("p0".equals(p2)) {
rp[1] =
new SimpleRequirementProposition(RequirementPrimitive.AS_GOOD_AS_POSSIBLE);
} else if("p1".equals(p2)) {
rp[1] =
new SimpleRequirementProposition(new BigDecimal(d2).doubleValue(), RequirementPrimitive.AS_GOOD_AS_POSSIBLE_TO_d);
} else if("p2".equals(p2)) {
rp[1] =
new SimpleRequirementProposition(new BigDecimal(d2).doubleValue(), RequirementPrimitive.BETTER_THAN_d);
} else if("p3".equals(p2)) {
rp[1] =
new SimpleRequirementProposition(new BigDecimal(d2).doubleValue(), RequirementPrimitive.AS_GOOD_AS_POSSIBLE,
RequirementPrimitive.BETTER_THAN_d);
}
return rp;
}
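// Example (illustrative; the d-values below are made up): for the pair string
// "p2-p1" with aspiration levels "0.5,0.9", the first objective gets a
// BETTER_THAN_d proposition at 0.5 and the second an AS_GOOD_AS_POSSIBLE_TO_d
// proposition at 0.9, mirroring how main() registers them:
//
//   RequirementProposition[] rp = getProposition("p2-p1", "0.5,0.9");
//   Repository.setRequirementProposition("sas-rubis_software-P1", rp[0]);
//   Repository.setRequirementProposition("sas-rubis_software-P2", rp[1]);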
public static void copyFolder(File src, File dest) throws IOException {
if (src.isDirectory()) {
// if directory not exists, create it
if (!dest.exists()) {
dest.mkdir();
System.out.println("Directory copied from " + src + " to "
+ dest);
}
// list all the directory contents
String files[] = src.list();
for (String file : files) {
// construct the src and dest file structure
File srcFile = new File(src, file);
File destFile = new File(dest, file);
// recursive copy
copyFolder(srcFile, destFile);
}
} else {
// if file, then copy it
// Use bytes stream to support all file types
InputStream in = new FileInputStream(src);
OutputStream out = new FileOutputStream(dest);
byte[] buffer = new byte[1024];
int length;
// copy the file content in bytes
while ((length = in.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
in.close();
out.close();
System.out.println("File copied from " + src + " to " + dest);
}
}
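// Usage sketch (paths are hypothetical): copyFolder() recursively mirrors a
// results directory before the working copy is removed.
//
//   copyFolder(new File("/tmp/sas"), new File("/tmp/backup/sas"));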
public static void delete(File file) throws IOException {
if (file.isDirectory()) {
// directory is empty, then delete it
if (file.list().length == 0) {
file.delete();
// System.out.println("Directory is deleted : "
// + file.getAbsolutePath());
} else {
// list all the directory contents
String files[] = file.list();
for (String temp : files) {
// construct the file structure
File fileDelete = new File(file, temp);
// recursive delete
delete(fileDelete);
}
// check the directory again, if empty then delete it
if (file.list().length == 0) {
file.delete();
// System.out.println("Directory is deleted : "
// + file.getAbsolutePath());
}
}
} else {
// if file, then delete it
file.delete();
// System.out.println("File is deleted : " +
// file.getAbsolutePath());
}
}
}
| 10,054 | 27.244382 | 183 | java |
| null | aspiration-study-main/code/org/sas/benchmark/req/spo/BenchmarkDelegate.java |
package org.sas.benchmark.req.spo;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.HashMap;
import org.ssase.model.Delegate;
public class BenchmarkDelegate implements Delegate{
// Base directory of the external measurement script invoked by predict().
// The original source does not declare this field; the value below is an
// assumption, following the per-user prefix used by the other runners in
// this package.
private static String prefix = "/Users/" + System.getProperty("user.name") + "/research/monitor/";
private int obj_index = 0;
public BenchmarkDelegate(int obj_index) {
super();
this.obj_index = obj_index;
}
public double predict(double[] xValue) {
String v = "";
for(int i = 0; i < xValue.length; i++) {
v += v.equals("")? (int)xValue[i] : ":" + (int)xValue[i];
}
ProcessBuilder processBuilder = new ProcessBuilder();
// invoke the external measurement script via the shell (Unix/macOS)
processBuilder.command("sudo sh", prefix + "system-interface", v);
processBuilder.redirectErrorStream(true);
double r = 0.0;
try {
Process process = processBuilder.start();
int exitCode = process.waitFor();
BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line;
while ((line = reader.readLine()) != null) {
r = Double.parseDouble(line);
break;
}
} catch (Exception e) {
e.printStackTrace();
}
return r;
}
@Override
public double predict2(double[] xValue) {
String v = "";
for(int i = 0; i < xValue.length; i++) {
v += v.equals("")? (int)xValue[i] : ":" + (int)xValue[i];
}
HashMap<String, Double> map = obj_index == 0? Parser.map1 : Parser.map2;
//if(map.containsKey(v)) {
// System.out.print(map.containsKey(v) + ": " + v + "***\n");
//}
if(map.containsKey(v)) {
double r = map.get(v);
if(r == 0) {
return Double.MAX_VALUE;
}
// Only needed for certain benchmarks
if(obj_index == 0) {
r = 1.0/r;
}
return r*100;
} else {
//System.out.print("cannot found " +v+"\n");
return Double.MAX_VALUE;
}
}
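// Lookup sketch (the key shown is hypothetical): predict2() encodes a
// configuration as a colon-separated string of option indices and looks it up
// in the tables built by Parser, returning Double.MAX_VALUE for unmeasured
// configurations.
//
//   BenchmarkDelegate d = new BenchmarkDelegate(1);
//   double cost = d.predict2(new double[]{1, 0, 2});   // key "1:0:2" -> map2 value * 100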
}
| 1,717 | 17.673913 | 95 | java |
| null | aspiration-study-main/code/org/sas/benchmark/req/spo/LatexRunner.java |
package org.sas.benchmark.req.spo;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import org.sas.benchmark.pw.Data.Pack;
public class LatexRunner {
static String prefix = "/Users/"+System.getProperty("user.name")+"/research/potential-publications/w-vs-wo-req/supplementary/materials/";
static String prefix_output = "/Users/"+System.getProperty("user.name")+"/research/potential-publications/w-vs-wo-req/supplementary/trash/";
static String prefix_pdf_output = "/Users/"+System.getProperty("user.name")+"/research/potential-publications/w-vs-wo-req/supplementary/pdf/";
// static String f = "w-vs-p.tex";
/**
* @param args
*/
// pdflatex -synctex=1 -interaction=nonstopmode --shell-escape w-vs-p.tex
public static void run(String f) {
ProcessBuilder processBuilder = new ProcessBuilder();
// macOS TeX Live install; compile the .tex file non-interactively
processBuilder.command("/Library/TeX/texbin/pdflatex", "-synctex=1",
"-interaction=nonstopmode", "--shell-escape",
"-output-directory=" + prefix_output, prefix + f + ".tex");
try {
File fi = new File(prefix_output);
if(!fi.exists()) {
fi.mkdir();
}
Process process = processBuilder.start();
BufferedReader reader = new BufferedReader(new InputStreamReader(
process.getInputStream()));
String line;
while ((line = reader.readLine()) != null) {
System.out.println(line);
}
int exitCode = process.waitFor();
System.out.println("\nExited with error code : " + exitCode);
} catch (IOException e) {
e.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
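// Usage sketch (the file name is hypothetical): run() compiles
// <prefix>/<name>.tex with pdflatex into prefix_output, after which
// copyFolder(name) moves the resulting PDF into prefix_pdf_output.
//
//   run("w-vs-p");
//   copyFolder("w-vs-p");   // throws IOException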
public static void copyFolder(String f) throws IOException {
File src = new File(prefix_output + f + ".pdf");
File dest = new File(
prefix_pdf_output
+ f + ".pdf");
File fif = new File(prefix_pdf_output);
if(!fif.exists()) {
fif.mkdir();
}
if(dest.exists()) {
dest.delete();
}
if (src.isDirectory()) {
// nothing to do: only the single generated PDF is copied here
} else {
// if file, then copy it
// Use bytes stream to support all file types
InputStream in = new FileInputStream(src);
OutputStream out = new FileOutputStream(dest);
byte[] buffer = new byte[1024];
int length;
// copy the file content in bytes
while ((length = in.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
in.close();
out.close();
System.out.println("File copied from " + src + " to " + dest);
File fi = new File(prefix_output);
delete(fi);
}
}
/*public static void generateFile() {
String eval = "";
//String time = "\begin{figure*}[t!]\n" + "\\centering\n";
for (Pack p : Data.packs) {
for (int i = 0; i < Data.weights.length; i ++) {
String s = Data.weights[i];
if (i % 3 == 0) {
eval += "\\begin{figure*}[h]\n" + "\\centering\n";
}
eval += "\\begin{subfigure}[h]{0.3\\textwidth}\n" +
"\\includegraphics[width=\\textwidth]{pdf/{"+Data.nameMap.get(p.getBenchmark())+"="+s+"=eval}.pdf}\n" +
"\\subcaption{\\textsc{"+Data.nameMap.get(p.getBenchmark()) + "}, [" + s.split("-")[0] + "," + s.split("-")[1] + "]" +"}\n" +
"\\end{subfigure}\n" + ((i + 1) % 3 == 0 ? "" : "~\n");
if ((i+1) % 3 == 0) {
eval += "\\caption{Convergence under equal number of evaluations for \\textsc{" + Data.nameMap.get(p.getBenchmark()) + "}.}\n" +
"\\end{figure*}\n";
//eval += "\\caption{Convergence under equal running time for \\textsc{" + Data.nameMap.get(p.getBenchmark()) + "}.}\n" +
//"\\end{figure*}\n";
}
}
}
//eval += "\\caption{Convergence under equal number of evaluations.}\n" +
//"\\end{figure*}\n";
System.out.print(eval);
//"\\caption{Convergence under equal running time.}\n"
}*/
public static void main (String[] args) {
//generateFile();
}
public static void delete(File file) throws IOException {
if (file.isDirectory()) {
// directory is empty, then delete it
if (file.list().length == 0) {
file.delete();
// System.out.println("Directory is deleted : "
// + file.getAbsolutePath());
} else {
// list all the directory contents
String files[] = file.list();
for (String temp : files) {
// construct the file structure
File fileDelete = new File(file, temp);
// recursive delete
delete(fileDelete);
}
// check the directory again, if empty then delete it
if (file.list().length == 0) {
file.delete();
// System.out.println("Directory is deleted : "
// + file.getAbsolutePath());
}
}
} else {
// if file, then delete it
file.delete();
// System.out.println("File is deleted : " +
// file.getAbsolutePath());
}
}
}
| 4,854 | 25.530055 | 143 |
java
|
null |
aspiration-study-main/code/org/sas/benchmark/req/spo/Parser.java
|
package org.sas.benchmark.req.spo;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import org.femosaa.core.EAConfigure;
/**
 * Parses the raw benchmark measurement files into the lookup tables (map1/map2)
 * used by the objective delegates, and derives the aspiration levels.
 * Originally written for comparing Pareto-based and weight-based search.
 *
 * Sample raw measurement row and encoded seed strings:
 * 0,1,0,0,1,3,0.6,23,3,250,100,4,250,23,40,0,1.4,47.112,7711.976945
 * 1:1:0:1:1:1:1:1:0:1:0:0:0:1:0:0:0:1:1:1:0:1:0:1:0:0:1:1:0:0:1:0:0:1:1:0:1:1:0:1:0:1:1:1:1:1:1:1:1:1:1:0:1:9:5:2:4:2:29
 * 0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:0:4:8:6:3:5:7:8
 * @author tao
 *
 */
public class Parser {
//public static String[] keepZero = {"BDBCAll","BDBJAll","X264All"};
// two objectives
public static LinkedHashMap<String, Double> map1 = new LinkedHashMap<String, Double>();
public static LinkedHashMap<String, Double> map2 = new LinkedHashMap<String, Double>();
public static List<String> seeds = new ArrayList<String>();
public static double u_threshold = 0.0001;
public static String[] d_values = null;
public static String selected = "SS-N";
public static double l1_t = 0.2;
public static double l2_t = 0.9;
public static double h1_t = 0.9;
public static double h2_t = 0.1;
public static double m1_t = 0.5;
public static double m2_t = 0.5;
public static double ex1_v = 0.5;
public static double ex2_v = 0.5;
public static double[] d1;
public static double[] d2;
//x264 Best 244.23Worst 821.963
// sql Best 12.513Worst 16.851
public static void main( String[] args )
{
/*org.femosaa.util.HV hv = new org.femosaa.util.HV();
double[][] f1 = new double[][]{{0.0,1.0}, {0.0,0.0}, {0.0,0.0}};
double[][] f2 = new double[][]{{800,800},{700,900}};
System.out.print(hv.hypervolume(f2));
if(1==1) return;*/
map1.clear();
map2.clear();
seeds.clear();
d_values = null;
if(selected.equals("CONEX")) {
readConex();
} else if(selected.equals("LSTM")) {
readLSTM();
} else {
read(selected);
}
}
public static void readLSTM(){
// We only need to get rid of the mandatory one or those that do not change at all.
ArrayList<ArrayList<Double>> list = new ArrayList<ArrayList<Double>>();
String[] names = null;
double time = 0.0;
File fi = new File("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/LSTM/");
for(File f : fi.listFiles()) {
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/LSTM/"+f.getName()));
String line = null;
if(f.getName().endsWith("1.csv")) {
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
String[] dd = line.split(",");
names = dd;
for(String s : dd) {
System.out.print("\"" + s + "\",\n");
}
o++;
continue;
}
String r = "";
String[] data = line.split(",");
int k = 0;
int index = 0;
//System.out.print( data.length+"**\n");
for(int i = 0; i < data.length+3; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(list.size() < data.length+1) {
list.add(new ArrayList<Double>());
}
ArrayList<Double> subList = list.get(k);
if(i == 2 || i == 3 || i == 4) {
if(!subList.contains(0.0)) {
subList.add(0.0);
}
k++;
continue;
}
///r += r.equals("")? data[i] : ":" + data[i];
double v = 0.0;
if("L1".equals(data[index])) {
v = 0.0;
} else {
v = Double.parseDouble(data[index].replace("\"[", "").replace("]\"", ""));
}
if(!subList.contains(v)) {
//System.out.print(v+"**\n");
subList.add(v);
}
k++;
index++;
}
}
} else if(f.getName().endsWith("2.csv")) {
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
String[] dd = line.split(",");
names = dd;
for(String s : dd) {
System.out.print("\"" + s + "\",\n");
}
o++;
continue;
}
String r = "";
String[] data = line.split(",");
int k = 0;
int index = 0;
for(int i = 0; i < data.length+2; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(list.size() < data.length) {
list.add(new ArrayList<Double>());
}
ArrayList<Double> subList = list.get(k);
if(i == 3 || i == 4) {
if(!subList.contains(0.0)) {
subList.add(0.0);
}
k++;
continue;
}
///r += r.equals("")? data[i] : ":" + data[i];
double v = 0.0;
if("L2".equals(data[index])) {
v = 1.0;
} else {
v = Double.parseDouble(data[index].replace("\"[", "").replace("]\"", ""));
}
if(!subList.contains(v)) {
subList.add(v);
}
k++;
index++;
}
}
} else if(f.getName().endsWith("3.csv")) {
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
String[] dd = line.split(",");
names = dd;
for(String s : dd) {
System.out.print("\"" + s + "\",\n");
}
o++;
continue;
}
String r = "";
String[] data = line.split(",");
int k = 0;
int index = 0;
for(int i = 0; i < data.length+1; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(list.size() < data.length-1) {
list.add(new ArrayList<Double>());
}
ArrayList<Double> subList = list.get(k);
if(i == 4) {
if(!subList.contains(0.0)) {
subList.add(0.0);
}
k++;
continue;
}
///r += r.equals("")? data[i] : ":" + data[i];
double v = 0.0;
if("L3".equals(data[index])) {
v = 2.0;
} else {
v = Double.parseDouble(data[index].replace("\"[", "").replace("]\"", ""));
}
if(!subList.contains(v)) {
subList.add(v);
}
k++;
index++;
}
}
} else if(f.getName().endsWith("4.csv")) {
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
String[] dd = line.split(",");
names = dd;
for(String s : dd) {
System.out.print("\"" + s + "\",\n");
}
o++;
continue;
}
String r = "";
String[] data = line.split(",");
int k = 0;
int index = 0;
for(int i = 0; i < data.length; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(list.size() < data.length-2) {
list.add(new ArrayList<Double>());
}
ArrayList<Double> subList = list.get(k);
/*if(i == 4) {
if(!subList.contains(0.0)) {
subList.add(0.0);
}
continue;
}*/
///r += r.equals("")? data[i] : ":" + data[i];
double v = 0.0;
if("L4".equals(data[index])) {
v = 3.0;
} else {
v = Double.parseDouble(data[index].replace("\"[", "").replace("]\"", ""));
}
if(!subList.contains(v)) {
subList.add(v);
}
k++;
index++;
}
}
}
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
HashSet<Integer> set = new HashSet<Integer>();
for(int i = 0; i < list.size(); i++) {
ArrayList<Double> subList = list.get(i);
// means it cannot be changed and has no variability
if (subList.size() == 1) {
set.add(i);
} else {
double[] d = new double[subList.size()];
for(int j = 0; j < subList.size(); j++) {
d[j] = subList.get(j);
}
Arrays.sort(d);
subList.clear();
for(int j = 0; j < d.length; j++) {
subList.add((Double)d[j]);
System.out.print("Original index: " + i + "=" + d[j] + "\n");
}
}
}
names = new String[] {"a","b","c","d","e","f","size","arch","link"};
for(int i = 0; i < list.size(); i++) {
if(!set.contains(i)) {
System.out.print("<item name=\""+ names[i] +"\" provision=\"0\" constraint=\"-1\" differences=\"1\" pre_to_max=\"0.7\" pre_of_max=\"0.1\" min=\"0\" max=\""+(list.get(i).size()-1)+"\" price_per_unit=\"0.5\" />\n");
}
}
for(int i = 0; i < list.size(); i++) {
if(!set.contains(i)) {
if(list.get(i).size() <= 2) {
System.out.print("<feature name=\""+names[i]+"\" type=\"categorical\" optional=\"true\"/>\n");
} else {
System.out.print("<feature name=\""+names[i]+"\" type=\"numeric\" range=\"0 "+(list.get(i).size()-1)+"\" gap=\"1\" />\n");
}
}
}
System.out.print("Unchanged ones: " + set.toString() + "\n");
//if (1==1)return;
HashSet<String> print_out = new HashSet<String>();
List<Double> o1 = new ArrayList<Double>();
List<Double> o2 = new ArrayList<Double>();
for(File f : fi.listFiles()) {
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/LSTM/"+f.getName()));
String line = null;
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
o++;
continue;
}
String r = "";
String[] data = line.split(",");
double v1 = 0;
double v2 = 0;
if(f.getName().endsWith("1.csv")) {
int index = 0;
int k = 0;
for(int i = 0; i < data.length + 3; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(i == 2 || i == 3 || i == 4) {
r += ":" + "0";
k++;
continue;
}
if(!set.contains(i)) {
ArrayList<Double> subList = list.get(k);
String s = data[index].replace("\"[", "").replace("]\"", "");
if("L1".equals(s)) {
s = "0";
}
int v = subList.indexOf(Double.parseDouble(s));
r += r.equals("")? v : ":" + v;
}
k++;
index++;
}
v1 = "nan".equals(data[3]) ? 0.0 : Double.valueOf(data[3]);
v2 = "nan".equals(data[4]) ? 0.0 : Double.valueOf(data[4]);
} else if(f.getName().endsWith("2.csv")) {
int index = 0;
int k = 0;
for(int i = 0; i < data.length + 2; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(i == 3 || i == 4) {
r += ":" + "0";
k++;
continue;
}
if(!set.contains(i)) {
ArrayList<Double> subList = list.get(k);
String s = data[index].replace("\"[", "").replace("]\"", "");
if("L2".equals(s)) {
s = "1";
}
int v = subList.indexOf(Double.parseDouble(s));
r += r.equals("")? v : ":" + v;
}
k++;
index++;
}
v1 = "nan".equals(data[4]) ? 0.0 : Double.valueOf(data[4]);
v2 = "nan".equals(data[5]) ? 0.0 : Double.valueOf(data[5]);
} else if(f.getName().endsWith("3.csv")) {
int index = 0;
int k = 0;
for(int i = 0; i < data.length + 1; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(i == 4) {
r += ":" + "0";
k++;
continue;
}
if(!set.contains(i)) {
ArrayList<Double> subList = list.get(k);
String s = data[index].replace("\"[", "").replace("]\"", "");
if("L3".equals(s)) {
s = "2";
}
int v = subList.indexOf(Double.parseDouble(s));
r += r.equals("")? v : ":" + v;
}
k++;
index++;
}
v1 = "nan".equals(data[5]) ? 0.0 : Double.valueOf(data[5]);
v2 = "nan".equals(data[6]) ? 0.0 : Double.valueOf(data[6]);
} else if(f.getName().endsWith("4.csv")) {
int index = 0;
int k = 0;
for(int i = 0; i < data.length; i++) {
if(i == 6 || i ==7) {
index++;
continue;
}
if(!set.contains(i)) {
ArrayList<Double> subList = list.get(k);
String s = data[index].replace("\"[", "").replace("]\"", "");
if("L4".equals(s)) {
s = "3";
}
int v = subList.indexOf(Double.parseDouble(s));
r += r.equals("")? v : ":" + v;
}
k++;
index++;
}
v1 = "nan".equals(data[6]) ? 0.0 : Double.valueOf(data[6]);
v2 = "nan".equals(data[7]) ? 0.0 : Double.valueOf(data[7]);
}
if(map1.containsKey(r)) {
System.out.print(line + " : " + r+ ", current " +map1.get(r) +" duplicate\n");
}
seeds.add(r);
if(v1 < 0) {
v1 = Math.abs(v1);
}
if(v2 < 0) {
v2 = Math.abs(v2);
}
map1.put(r, v1);
map2.put(r, v2);
System.out.print(/*line + " : " + */r + "=" + map1.get(r)+ " and " + map2.get(r) +"\n");
if(!"nan".equals(data[data.length-1])) {
//v1 = 1.0/v1;
time += Double.valueOf(data[data.length-1]);
o1.add(v1);
o2.add(v2);
print_out.add("("+v1+","+v2+")");
}
//if(!"nan".equals(data[data.length-1]))
//time += Double.valueOf(data[data.length-1]);
}
System.out.print(map1.size() + "\n");
setAspirationLevel(o1,o2);
//System.out.print("Mean runtime: " + time/map1.size() + "\n");
//getSeeds();
Collections.sort(o1);
Collections.sort(o2);
double t1 = Double.MAX_VALUE;
for (double d : o1) {
if(d < t1 && d != o1.get(0) ) {
t1 = d;
}
}
double t2 = Double.MAX_VALUE;
for (double d : o2) {
if(d < t2 && d != o2.get(0) ) {
t2 = d;
}
}
/*for (String s : print_out) {
System.out.print(s + "\n");
}*/
System.out.print(o1.get(0) - t1 + o1.get(0) + "\n");
System.out.print(o2.get(0) - t2 + o2.get(0));
System.out.print("min1: " + o1.get(0)+ "\n");
System.out.print("min2: " + o2.get(0));
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
public static void readConex(){
// We only need to get rid of the mandatory one or those that do not change at all.
ArrayList<ArrayList<String>> list = new ArrayList<ArrayList<String>>();
ArrayList<String> names = new ArrayList<String>();
//String[] names = null;
double time = 0.0;
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/conex-perf.csv"));
String line = null;
int o = 0;
while ((line = reader.readLine()) != null) {
if(o==0) {
String[] dd = line.split(",");
for(int k = 1; k < dd.length; k++) {
if(!dd[k].equals("") && !dd[k].equals("performance")) {
names.add(dd[k]);
System.out.print("\"" + dd[k] + "\",\n");
}
}
o++;
System.out.print("names " + names.size()+"\n");
continue;
}
String r = "";
String[] data = line.split(",");
for(int i = 1; i < data.length - 1; i++) {
///r += r.equals("")? data[i] : ":" + data[i];
if(list.size() <= i-1) {
list.add(new ArrayList<String>());
}
//System.out.print(data.length + " " + list.size() + "\n");
ArrayList<String> subList = list.get(i-1);
if(!subList.contains(data[i])) {
//System.out.print(data.length + " " + list.size() + "\n");
subList.add(data[i]);
}
}
}
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
HashSet<Integer> set = new HashSet<Integer>();
for(int i = 0; i < list.size(); i++) {
ArrayList<String> subList = list.get(i);
// means it cannot be changed and has no variability
if (subList.size() == 1) {
set.add(i);
} else {
/*String[] d = new String[subList.size()];
for(int j = 0; j < subList.size(); j++) {
d[j] = subList.get(j);
}*/
//Arrays.sort(d);
//subList.clear();
for(int j = 0; j < subList.size(); j++) {
//subList.add(d[j]);
System.out.print(names.get(i) + " Original index: " + i + "=" + subList.get(j) + "\n");
}
}
//System.out.print(" <feature name=\""+names.get(i)+"\" type=\"numeric\" range=\"0 "+(subList.size()-1)+"\" gap=\"1\"/>\n");
//System.out.print("<item name=\""+names.get(i)+"\" provision=\"0\" constraint=\"-1\" differences=\"1\" pre_to_max=\"0.7\" pre_of_max=\"0.1\" min=\"0\" max=\""+(subList.size()-1)+"\" price_per_unit=\"0.5\" />\n");
}
/*for(int i = 0; i < list.size(); i++) {
if(!set.contains(i)) {
System.out.print("<item name=\""+ names[i] +"\" provision=\"0\" constraint=\"-1\" differences=\"1\" pre_to_max=\"0.7\" pre_of_max=\"0.1\" min=\"0\" max=\""+(list.get(i).size()-1)+"\" price_per_unit=\"0.5\" />\n");
}
}
for(int i = 0; i < list.size(); i++) {
if(!set.contains(i)) {
if(list.get(i).size() <= 2) {
System.out.print("<feature name=\""+names[i]+"\" type=\"categorical\" optional=\"true\"/> />\n");
} else {
System.out.print("<feature name=\""+names[i]+"\" type=\"numeric\" range=\"0 "+(list.get(i).size()-1)+"\" gap=\"1\" />\n");
}
}
}*/
System.out.print("Unchanged ones: " + set.toString() + "\n");
//if (1==1)return;
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/conex-perf.csv"));
String line = null;
int o = 0;
while ((line = reader.readLine()) != null) {
if(o==0) {
o++;
continue;
}
String r = "";
String[] data = line.split(",");
for(int i = 1; i < data.length - 1; i++) {
if(!set.contains(i)) {
ArrayList<String> subList = list.get(i-1);
int v = subList.indexOf(data[i]);
/*for(String s : subList) {
System.out.print(s + "**\n");
}
System.out.print(data[i] + " : " + " " + subList.contains(data[i])+"\n");*/
r += r.equals("")? v : ":" + v;
}
}
if(map1.containsKey(r)) {
System.out.print(line + " : " + r+ ", current " +map1.get(r) +" duplicate\n");
}
seeds.add(r);
map1.put(r, Double.valueOf(data[data.length-1]));
System.out.print(/*line + " : " + */r + "=" + map1.get(r)+ " and "+"\n");
time += Double.valueOf(data[data.length-1]);
}
System.out.print(map1.size() + "\n");
System.out.print("Mean runtime: " + time/map1.size() + "\n");
//getSeeds();
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
HashSet<String> print_out = new HashSet<String>();
List<Double> o1 = new ArrayList<Double>();
List<Double> o2 = new ArrayList<Double>();
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/conex.txt"));
String line = null;
int o = 0;
ArrayList<Integer> new_ids= new ArrayList<Integer>();
while ((line = reader.readLine()) != null) {
if(o==0) {
o++;
String[] data = line.split(" ");
for (int k = 0; k < names.size(); k++) {
int l = -1;
for(int i = 0; i < data.length; i++) {
if(names.get(k).equals(data[i])) {
l = i;
System.out.print(new_ids.size() + " : " + data[i] + " " + l+"***\n");
}
}
if(l == -1) {
System.out.print(names.get(k) + " was not found\n");
}
new_ids.add(l);
//System.out.print(data.length + " " + l+"***\n");
}
System.out.print(list.size() + " : " + new_ids.size() + " " +"size***\n");
continue;
}
String r = "";
String[] data = line.split(" ");
/*for(String s : data) {
System.out.print(s+"\n");
}
System.out.print("-----\n");
*/
for (int k = 0; k < new_ids.size();k++) {
ArrayList<String> subList = list.get(k);
//System.out.print(data.length + " : " + names.get(k) + " : " + data[new_ids.get(k)] + "***\n");
int v = subList.indexOf(convert(data[new_ids.get(k)]));
r += r.equals("")? v : ":" + v;
}
if(map2.containsKey(r)) {
System.out.print(r+ ", current " +map2.get(r) +" duplicate\n");
}
if(!"".equals(data[1])) {
map2.put(r, Double.valueOf(data[1]));
}
System.out.print(/*line + " : " + */r + "=" + map1.get(r)+ " and "+ map2.get(r)+"\n");
if(map1.containsKey(r) && map2.containsKey(r)) {
double v1 = map1.get(r);
double v2 = map2.get(r);
//v1 = 1.0/v1;
//time += Double.valueOf(data[data.length-1]);
o1.add(v1);
o2.add(v2);
print_out.add("("+v1+","+v2+")");
}
//time += Double.valueOf(data[data.length-1]);
}
System.out.print(map2.size() + "\n");
int p = 0;
for (String s : map1.keySet()) {
if(map2.containsKey(s)) {
p++;
}
}
System.out.print(p);
setAspirationLevel(o1,o2);
//System.out.print("Mean runtime: " + time/map1.size() + "\n");
//getSeeds();
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private static String convert(String s) {
if("FALSE".equals(s)) {
return "False";
}
if("TRUE".equals(s)) {
return "True";
}
return s;
}
public static void read(String name){
// We only need to get rid of the mandatory one or those that do not change at all.
ArrayList<ArrayList<Double>> list = new ArrayList<ArrayList<Double>>();
String[] names = null;
double time = 0.0;
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/"+name+".csv"));
String line = null;
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
String[] dd = line.split(",");
names = dd;
for(String s : dd) {
System.out.print("\"" + s + "\",\n");
}
o++;
continue;
}
String r = "";
String[] data = line.split(",");
for(int i = 0; i < data.length - 2; i++) {
///r += r.equals("")? data[i] : ":" + data[i];
if(list.size() <= i) {
list.add(new ArrayList<Double>());
}
ArrayList<Double> subList = list.get(i);
if(!subList.contains(Double.parseDouble(data[i]))) {
subList.add(Double.parseDouble(data[i]));
}
}
}
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
HashSet<Integer> set = new HashSet<Integer>();
for(int i = 0; i < list.size(); i++) {
ArrayList<Double> subList = list.get(i);
// means it cannot be changed and has no variability
if (subList.size() == 1) {
set.add(i);
} else {
double[] d = new double[subList.size()];
for(int j = 0; j < subList.size(); j++) {
d[j] = subList.get(j);
}
Arrays.sort(d);
subList.clear();
for(int j = 0; j < d.length; j++) {
subList.add((Double)d[j]);
System.out.print("Original index: " + i + "=" + d[j] + "\n");
}
}
}
for(int i = 0; i < list.size(); i++) {
if(!set.contains(i)) {
System.out.print("<item name=\""+ names[i] +"\" provision=\"0\" constraint=\"-1\" differences=\"1\" pre_to_max=\"0.7\" pre_of_max=\"0.1\" min=\"0\" max=\""+(list.get(i).size()-1)+"\" price_per_unit=\"0.5\" />\n");
}
}
for(int i = 0; i < list.size(); i++) {
if(!set.contains(i)) {
if(list.get(i).size() <= 2) {
System.out.print("<feature name=\""+names[i]+"\" type=\"categorical\" optional=\"true\"/>\n");
} else {
System.out.print("<feature name=\""+names[i]+"\" type=\"numeric\" range=\"0 "+(list.get(i).size()-1)+"\" gap=\"1\" />\n");
}
}
}
System.out.print("Unchanged ones: " + set.toString() + "\n");
//if (1==1)return;
HashSet<String> print_out = new HashSet<String>();
List<Double> o1 = new ArrayList<Double>();
List<Double> o2 = new ArrayList<Double>();
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/public-data/performance/flash-data/Flash-MultiConfig/Data/"+name+".csv"));
String line = null;
int o = 0;
while ((line = reader.readLine()) != null) {
if(line.contains("$") || o==0) {
o++;
continue;
}
String r = "";
String[] data = line.split(",");
for(int i = 0; i < data.length - 2; i++) {
if(!set.contains(i)) {
ArrayList<Double> subList = list.get(i);
int v = subList.indexOf(Double.parseDouble(data[i]));
r += r.equals("")? v : ":" + v;
}
}
if(map1.containsKey(r)) {
System.out.print(line + " : " + r+ ", current " +map1.get(r) +" duplicate\n");
}
seeds.add(r);
double v1 = "nan".equals(data[data.length-2]) ? 0.0 : Double.valueOf(data[data.length-2]);
double v2 = "nan".equals(data[data.length-1]) ? 0.0 : Double.valueOf(data[data.length-1]);
if(v1 == 0|| v2 == 0) {
continue;
}
if(v1 < 0) {
v1 = Math.abs(v1);
}
if(v2 < 0) {
v2 = Math.abs(v2);
}
map1.put(r, v1);
map2.put(r, v2);
//System.out.print(/*line + " : " + */r + "=" + map1.get(r)+ " and " + map2.get(r) +"\n");
System.out.print("("+Math.log10((1.0/map1.get(r)))+ "," + Math.log10(map2.get(r)) +")\n");
if(!"nan".equals(data[data.length-1])) {
// v1 = 1.0/v1;//
v1 = -1.0*v1;
time += Double.valueOf(data[data.length-1]);
o1.add(v1);
o2.add(v2);
print_out.add("("+v1+","+v2+")");
}
}
System.out.print(map1.size() + "\n");
System.out.print(print_out.size() + "\n");
System.out.print("Mean runtime: " + time/map1.size() + "\n");
//getSeeds();
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
setAspirationLevel(o1,o2);
Collections.sort(o1);
Collections.sort(o2);
double t1 = Double.MAX_VALUE;
for (double d : o1) {
if(d < t1 && d != o1.get(0) ) {
t1 = d;
}
}
double t2 = Double.MAX_VALUE;
for (double d : o2) {
//System.out.print(d+"\n");
if(d < t2 && d != o2.get(0) ) {
t2 = d;
}
}
/*for (String s : print_out) {
System.out.print(s + "\n");
}*/
if(o1.get(0) <0 && t1 < 0) {
System.out.print((-1.0*(Math.abs(o1.get(0)) - Math.abs(t1) + Math.abs(o1.get(0)))) + "\n");
} else {
System.out.print(o1.get(0) - t1 + o1.get(0) + "\n");
}
System.out.print(o2.get(0) - t2 + o2.get(0) + "\n");
System.out.print("min 1: " + o1.get(0)+ "\n");
System.out.print("min 2: " + o2.get(0) + "\n");
}
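// Worked example (values are made up): in a CSV row such as
//   200,0,8,0.82,12.5
// the last two columns are the objectives; each of the remaining option values
// is replaced by its position in that option's sorted list of distinct observed
// values, giving a key like "1:0:3". read() then stores the first objective
// (0.82) in map1 and the second (12.5) in map2 under that key.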
//HashMap<Double, List<Double>> map1, HashMap<Double, List<Double>> map2
public static void setAspirationLevel(List<Double> o1, List<Double> o2) {
double[] d1 = new double[o1.size()];
double[] d2 = new double[o2.size()];
for ( int i = 0; i < o1.size(); i++) {
d1[i] = o1.get(i);
d2[i] = o2.get(i);
}
Arrays.sort(d1);
Arrays.sort(d2);
int l1 = 0;
int l2 = 0;
/*if("ShapesAll1".equals(selected) || "Wafer1".equals(selected)) {
l1 = (int)Math.round(d1.length * 0.1);//0.1
l2 = (int)Math.round(d2.length * 0.9);//0.9
} else {*/
l1 = (int)Math.round(d1.length * l1_t);//0.1 0.7 0.1
l2 = (int)Math.round(d2.length * l2_t);//0.9 0.95 0.3
// 0.2, 0.9 for SS-M
// 0.1, 0.9 for SS-N
// 0.1, 0.9 for SS-K
// 0.1, 0.9 for SS-J
// 0.1, 0.3 for ShapesAll and other dnn
// 0.3, 0.9 for xgboost4096
//}
int h1 = 0;
int h2 = 0;
if("feature6".equals(selected)) {
h1 = (int)Math.round(d1.length * 0.8);//0.8
h2 = (int)Math.round(d2.length * 0.2);//0.2
} else {
h1 = (int)Math.round(d1.length * h1_t);//0.9 0.95 0.3
h2 = (int)Math.round(d2.length * h2_t);//0.1 0.7 0.1
// 0.9, 0.1 for SS-M
// 0.9, 0.1 for SS-N
// 0.9, 0.1 for SS-K
// 0.9, 0.1 for SS-J
// others follow SS-M
// 0.3, 0.1 for ShapesAll and other dnn (0.4, 0.1 for DiatomSizeReduction)
// 0.9, 0.3 for xgboost4096
}
int m1 = 0;
int m2 = 0;
/*if("ShapesAll1".equals(selected)) {
m1 = (int)Math.round(d1.length * 0.2);//0.5
m2 = (int)Math.round(d2.length * 0.2);//0.5
} else {*/
m1 = (int)Math.round(d1.length * m1_t);//0.5 0.9 0.2
m2 = (int)Math.round(d2.length * m2_t);//0.5 0.9 0.2
// 0.5, 0.5 for SS-M
// 0.5, 0.5 for SS-N
// 0.5, 0.5 for SS-K
// 0.5, 0.5 for SS-J
// others follow SS-M
// 0.2, 0.2 for ShapesAll and other dnn
// 0.5, 0.5 for xgboost4096
//}
System.out.print("m1: " + m1 + ", m2: " + m1 + "\n");
int u1 = (int)Math.round(d1.length * u_threshold);
int u2 = (int)Math.round(d2.length * u_threshold);
int ex1 = (int)Math.round(d1.length * 0.98);
int ex2 = (int)Math.round(d2.length * 0.98);
System.out.print("ex1: " + ex1 + ", ex2: " + ex2 + "\n");
ex1_v = d1[ex1];
ex2_v = d2[ex2];
System.out.print("d1: " + d1[0] + ":" + d1[d1.length-1] + "\n");
System.out.print("d2: " + d2[0] + ":" + d2[d2.length-1] + "\n");
Parser.d1 = new double[] {d1[0], d1[d1.length-1]};
Parser.d2 = new double[] {d2[0], d2[d2.length-1]};
double[] l = new double[] {d1[l1],d2[l2]};
System.out.print(d1[l1]+ "," + d2[l2] + "\n");
double[] h = new double[] {d1[h1],d2[h2]};
System.out.print(d1[h1] + "," + d2[h2] + "\n");
double[] m = new double[] {d1[m1],d2[m2]};
System.out.print(d1[m1] + "," + d2[m2] + "\n");
double[] u = null;
/*if ("SS-N".equals(selected)) {
u = new double[] {d1[u1] - 0.5*d1[u1],d2[u2] - 0.5*d2[u2]};
} else {
u = new double[] {d1[u1],d2[u2]};
}*/
u = new double[] {d1[u1],d2[u2]};
System.out.print(d1[u1] + "," + d2[u2] + "\n");
System.out.print("-----------\n");
System.out.print(Math.log10(d1[l1])+ "," + Math.log10(d2[l2]) + "\n");
System.out.print(Math.log10(d1[h1]) + "," + Math.log10(d2[h2]) + "\n");
System.out.print(Math.log10(d1[m1]) + "," + Math.log10(d2[m2]) + "\n");
System.out.print(Math.log10(d1[u1]) + "," + Math.log10(d2[u2]) + "\n");
int l_count = 0;
int h_count = 0;
int m_count = 0;
int u_count = 0;
for ( int i = 0; i < o1.size(); i++) {
if(l[0] >= o1.get(i) && l[1] >= o2.get(i)) {
l_count++;
}
if(h[0] >= o1.get(i) && h[1] >= o2.get(i)) {
h_count++;
}
if(m[0] >= o1.get(i) && m[1] >= o2.get(i)) {
m_count++;
}
if(u[0] >= o1.get(i) && u[1] >= o2.get(i)) {
u_count++;
}
}
System.out.print("l_count: " + l_count + "\n");
System.out.print("h_count: " + h_count + "\n");
System.out.print("m_count: " + m_count + "\n");
System.out.print("u_count: " + u_count + "\n");
d_values = new String[4];
d_values[0] = String.valueOf(l[0])+","+String.valueOf(l[1]);
d_values[1] = String.valueOf(h[0])+","+String.valueOf(h[1]);
d_values[2] = String.valueOf(m[0])+","+String.valueOf(m[1]);
d_values[3] = String.valueOf(u[0])+","+String.valueOf(u[1]);
//System.out.print( d_values[3]+"\n");
}
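// Worked example (numbers are illustrative): with 1000 measured configurations,
// l1_t = 0.2 and l2_t = 0.9 pick the values at sorted positions
// round(1000 * 0.2) = 200 and round(1000 * 0.9) = 900, so the "l" aspiration
// level is the pair (d1[200], d2[900]); the "u" (unrealistic) level uses
// u_threshold in the same way, and d_values stores each pair as an "x,y" string.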
public static void validateUnchanged(){
}
public static void validate(){
try {
BufferedReader reader = new BufferedReader(new FileReader("/Users/"+System.getProperty("user.name")+"/research/experiments-data/fuzzy-requirement/single-objective-dataset/"+selected+".csv"));
String line = null;
int[] store = null;
int total = 0;
while ((line = reader.readLine()) != null) {
if(line.startsWith("$")) {
String[] d = line.split(",");
for (int i = 0; i < d.length; i++) {
//System.out.print("\""+d[i].substring(1) + "\",\n");
}
continue;
}
String[] data = line.split(",");
if(store == null) {
store = new int[data.length - 1];
for(int i = 0; i < store.length; i++) {
store[i] = 0;
}
}
for(int i = 0; i < store.length; i++) {
if(data[i].equals("1")) {
store[i] += 1;
}
}
total++;
}
String r = "";
for(int i = 0; i < store.length; i++) {
if(store[i] == total) {
r += i + ",";
}
}
System.out.print(r);
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public static List<String> getSeeds(){
int no = EAConfigure.getInstance().pop_size;
List<String> list = new ArrayList<String>();
/*for (int i = 0; i < seeds.size(); i++ ) {
System.out.print(i+"\n");
list.add(seeds.get(i));
}*/
int gap = seeds.size() / no;
for (int i = 0; i < seeds.size(); i=i+gap ) {
System.out.print(i+"\n");
list.add(seeds.get(i));
}
if (list.size() < no) {
list.add(seeds.get(seeds.size()-1));
}
if (list.size() > no) {
list.remove(list.size()-1);
}
for (int i = 0; i < list.size(); i++ ) {
System.out.print(list.get(i) + "\n");
}
System.out.print(list.size());
return list;
}
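// Example (sizes are illustrative): with 1000 parsed seeds and a population
// size of 10, gap = 100, so getSeeds() returns every 100th configuration
// (indices 0, 100, ..., 900), padding or trimming to exactly pop_size entries.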
private static void normalize(){
double max = 17.894581279143072;
double v = 4.1823277703510335;
double min = 0;
v = (v - min) / (max - min);
System.out.print((0.3 * v) + 1.2);
/**
*17.894581279143072
10.953841910378587
4.819035135705402
4.1823277703510335
1.0097075186941624
*/
}
/**
* apache=0.08888888888888889;0.36666666666666664;0.6444444444444445;
* bdbc=0.011525532255482631;0.11996467982050739;0.37815312640389964;
* bdbj=0.025053422739665463;0.15032053643799279;0.5187532237860143;
* llvm=0.290950744558992;0.43413516609392905;0.7205040091638032;
* x264=0.26962281884538364;0.6158034940015544;0.9619841691577251;
* sql=0.11226371599815588;0.45804518211157225;0.6885661595205165;
*/
private static void run_normalize(){
String[] a = new String[]{"13.0", "14.5", "15.5"};
double max = 16.851;
double min = 12.513;
for (String s : a) {
double v = Double.parseDouble(s);
v = (v - min) / (max - min);
System.out.print(v+";");
}
}
}
| 38,120 | 25.751579 | 217 |
java
|
null |
aspiration-study-main/code/org/sas/benchmark/req/spo/PythonRunner.java
|
package org.sas.benchmark.req.spo;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;
public class PythonRunner {
static String prefix = "/Users/" + System.getProperty("user.name") + "/research/monitor/";
static String test_data = " [\"p0-p0\",650,616,674,606,578,792,618,674,632,652,704,730,622,692,652,688,646,628,682,618,652,670,628,728,598,622,624,628,604,630,670,618,636,598,614,606,760,676,604,600,620,658,630,606,624,664,602,656,624,688,692,614,640,620,654,788,582,612,700,634,716,652,698,630,626,606,778,598,674,906,686,636,736,846,650,624,806,718,610,634,616,640,606,628,616,904,640,668,620,612,596,648,664,610,694,624,640,620,728,680],\n" +
"[\"p0-p1\",642,590,646,636,612,698,650,652,754,598,690,598,658,594,618,640,626,610,660,638,640,600,610,598,716,766,634,564,600,600,638,618,766,630,616,604,744,630,592,624,638,684,892,600,642,586,632,700,614,932,620,654,624,626,642,674,594,596,784,666,608,606,688,578,598,694,630,594,654,588,608,652,620,592,670,652,590,850,586,680,626,626,638,660,646,854,590,610,792,658,576,696,678,650,622,662,642,774,658,668],\n" +
"[\"p1-p0\",846,842,744,1554,746,650,686,920,1050,892,1870,750,696,604,848,716,936,782,832,824,788,1132,722,664,866,796,1138,1242,842,744,818,858,950,864,688,828,884,748,840,820,780,822,746,888,960,694,696,848,700,1134,762,972,898,708,734,1834,678,1176,784,732,726,866,896,676,678,1726,910,666,1024,1016,1020,798,1080,740,702,828,730,722,942,782,708,778,800,1004,968,702,708,1310,920,644,1510,754,1146,1106,1790,1038,692,798,926,604],\n" +
"[\"p0-p2\",658,598,644,580,596,752,606,626,622,858,660,584,692,694,740,594,784,612,680,586,604,576,654,630,630,604,666,574,666,640,588,570,638,810,680,1442,624,622,692,590,586,608,586,686,580,566,590,620,572,612,600,650,614,650,650,654,626,602,716,594,596,620,546,580,622,588,1374,590,624,592,636,632,604,576,568,604,576,592,634,644,582,594,606,638,624,670,672,610,640,570,602,722,620,604,630,648,630,630,596,634],\n" +
"[\"p2-p0\",1714,914,812,828,816,944,846,854,780,700,736,796,858,950,1040,1066,782,1066,770,676,1316,940,886,710,1220,794,920,668,814,1308,772,844,748,1548,808,816,726,798,810,848,912,1226,978,786,814,1192,1042,790,912,786,760,842,1134,674,970,746,1018,782,816,1060,1000,718,1112,750,936,674,1504,1140,996,870,1200,1532,822,722,814,804,788,712,1272,800,1268,1794,870,1228,752,1028,680,944,2438,902,868,680,932,960,898,758,864,658,1148,708],\n" +
"[\"p0-p3\",778,590,664,776,636,686,734,662,722,844,636,616,638,640,626,636,650,694,624,658,662,628,612,638,628,596,584,588,604,636,580,612,666,668,662,642,616,652,590,666,634,634,652,686,626,640,628,674,588,676,684,708,618,622,660,606,644,622,676,694,698,666,724,698,676,648,652,810,618,656,680,770,658,630,616,652,628,744,658,704,696,650,624,724,570,662,604,654,650,624,726,614,650,722,616,620,698,582,610,662],\n" +
"[\"p3-p0\",618,704,576,620,650,636,662,590,604,636,714,664,666,646,612,632,640,600,666,708,676,620,576,782,624,594,628,578,612,628,630,630,576,588,816,620,610,654,624,646,608,664,630,652,592,640,612,702,700,602,638,602,600,600,632,590,596,696,606,680,636,776,594,582,602,840,662,596,664,676,658,626,564,636,596,678,622,660,672,608,588,632,626,606,718,684,580,628,592,622,670,716,608,676,606,596,662,618,622,664],\n" +
"[\"p1-p1\",572,564,614,574,606,584,578,578,606,578,620,554,584,590,570,658,610,608,630,572,610,564,610,588,570,616,614,586,604,594,598,594,608,660,560,596,670,562,560,594,664,610,602,608,626,692,572,614,586,596,622,604,630,652,618,626,668,580,614,628,596,584,638,574,600,640,608,646,586,616,638,628,652,598,638,616,604,658,618,596,688,600,600,592,592,600,656,612,604,632,618,612,580,612,558,572,616,594,582,612],\n" +
"[\"p2-p2\",558,602,594,594,584,562,624,578,580,808,574,578,586,574,592,618,608,572,578,642,608,630,582,562,598,570,630,614,610,648,620,580,576,640,618,592,610,586,606,588,592,624,568,600,710,576,574,612,640,576,644,578,598,584,596,616,578,622,580,614,624,640,632,656,586,604,564,572,578,614,682,608,662,596,576,658,678,630,630,608,586,624,640,584,596,594,612,620,638,596,598,582,580,568,614,608,596,604,592,574],\n" +
"[\"p3-p3\",618,636,574,606,694,670,620,644,626,632,638,614,586,740,612,646,730,674,644,684,654,598,624,606,602,590,604,670,718,654,624,606,638,722,594,666,650,694,598,650,646,626,644,638,732,680,666,650,650,672,678,710,666,618,652,624,678,684,702,606,694,622,696,1040,688,636,654,668,696,650,666,912,640,686,682,644,616,668,598,674,696,646,662,604,658,626,688,710,582,654,702,680,618,696,600,684,690,652,622,730],\n" +
"[\"p1-p2\",620,582,624,588,602,574,636,568,604,598,580,584,628,584,580,602,600,610,574,588,656,636,778,618,594,576,580,624,626,592,620,574,674,638,614,668,614,560,570,588,610,592,588,586,620,614,638,584,610,666,564,598,596,576,584,572,614,590,618,566,624,620,580,620,676,566,602,582,582,636,580,590,566,614,678,564,598,582,588,608,606,654,596,602,598,646,592,586,606,636,584,574,612,620,728,598,640,608,702,582],\n" +
"[\"p2-p1\",616,566,592,606,588,662,586,636,602,570,630,628,662,570,590,592,586,590,604,614,568,658,574,588,580,578,588,612,568,580,568,630,618,668,580,584,580,584,588,574,610,590,578,642,584,610,564,608,572,630,598,642,638,642,586,592,574,594,566,608,592,566,586,644,658,646,568,568,570,606,562,572,562,588,592,570,634,630,580,602,620,604,594,584,584,608,630,578,582,586,608,590,580,642,588,584,618,606,584,550],\n" +
"[\"p1-p3\",928,846,764,792,778,978,830,2430,924,692,1642,1532,702,724,792,720,902,930,984,732,1230,752,728,1034,754,834,1110,680,1166,642,878,720,958,1018,816,710,1188,852,796,750,1566,880,1142,652,784,774,958,810,662,1666,924,1450,702,828,702,830,934,776,840,702,612,822,1016,668,902,1092,774,694,656,894,1168,742,1922,652,784,1604,1246,794,716,794,724,740,840,764,754,918,690,770,754,988,1434,950,742,812,788,794,774,1098,1022,798],\n" +
"[\"p3-p1\",662,640,644,596,616,556,578,620,630,626,614,618,576,674,614,622,652,574,612,624,616,630,616,608,624,722,678,588,722,714,660,610,634,630,592,680,804,656,620,650,646,638,570,598,602,782,636,612,602,592,664,772,1212,636,576,658,590,656,600,688,606,640,630,592,644,594,582,606,608,708,654,640,642,1084,636,720,618,664,700,600,582,592,608,618,586,654,724,576,606,612,618,646,724,624,668,656,814,586,674,666],\n" +
"[\"p2-p3\",1772,712,770,720,760,686,974,832,918,1004,706,968,830,734,740,984,682,836,1042,868,862,1158,674,882,878,702,960,850,1220,788,842,598,740,808,672,786,1342,980,792,1208,1072,746,758,766,606,820,926,884,762,1012,2042,632,748,838,674,872,804,1036,830,830,686,814,756,914,932,1654,1420,1332,1514,682,644,800,954,652,1480,738,948,882,638,946,758,936,1962,770,1056,1328,988,780,716,1074,1612,692,940,1024,1096,1028,1162,864,666,676],\n" +
"[\"p3-p2\",634,594,650,694,666,676,596,574,624,594,650,592,652,678,652,646,606,592,626,720,596,616,628,620,656,658,614,614,676,754,808,692,642,582,642,594,602,624,616,584,630,632,594,670,610,636,606,1192,806,634,588,604,570,738,606,546,660,606,652,678,572,646,610,628,634,584,678,712,624,622,730,638,604,662,668,620,614,590,620,638,618,682,614,620,604,954,636,684,1170,688,596,608,656,710,670,710,616,624,682,556]\n" +
"";
public static void main(String[] arg) {
Map<String, Integer> map = new HashMap<String, Integer>();
/*run(map);*/
start(map, test_data);
for (String s : map.keySet()) {
System.out.print(s + "="+map.get(s) + "\n");
}
}
public static void start(Map<String, Integer> map, String data) {
String main = "";
try {
BufferedReader reader = new BufferedReader(new FileReader(prefix + "stats2.py"));
String line = null;
while ((line = reader.readLine()) != null) {
main += line + "\n";
}
reader.close();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
main = main.replace("[java_filling_data]", data +"\n");
//System.out.print(main);
try {
File f = new File(prefix + "stats2-copy.py");
if(f.exists()) {
f.delete();
}
BufferedWriter bw = new BufferedWriter(new FileWriter(prefix + "stats2-copy.py", false));
bw.write(main);
bw.flush();
bw.close();
} catch (Exception e) {
e.printStackTrace();
}
run(map);
}
public static void run(Map<String, Integer> map) {
ProcessBuilder processBuilder = new ProcessBuilder();
// run the generated stats script with the local python interpreter
processBuilder.command("python", prefix + "stats2-copy.py", "--demo");
processBuilder.redirectErrorStream(true);
try {
Process process = processBuilder.start();
int exitCode = process.waitFor();
BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line;
while ((line = reader.readLine()) != null) {
// System.out.print(line + "\n");
if (!line.trim().equals("")) {
String[] s = line.split(",");
if (!map.containsKey(s[1].trim())) {
map.put(s[1].trim(), 0);
}
map.put(s[1].trim(), map.get(s[1].trim()) + Integer.parseInt(s[0].trim()));
}
}
System.out.println("\nExited with error code : " + exitCode);
} catch (Exception e) {
e.printStackTrace();
}
}
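// Expected output format (inferred from the parsing above; the lines shown are
// hypothetical): each non-empty line printed by stats2-copy.py is "count, label",
// and run() sums the counts per label, e.g.
//
//   3, p1-p2
//   1, p1-p2
//
// yields map.get("p1-p2") == 4.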
}
| 9,298 | 69.984733 | 453 |
java
|
null |
aspiration-study-main/code/org/sas/benchmark/req/spo/Simulator.java
|
package org.sas.benchmark.req.spo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.femosaa.core.EAConfigure;
import org.femosaa.core.SASAlgorithmAdaptor;
import org.femosaa.seed.FixedSeeder;
import org.femosaa.seed.NewSeeder;
import org.femosaa.util.Logger;
import org.ssase.Service;
import org.ssase.objective.Objective;
import org.ssase.objective.QualityOfService;
import org.ssase.objective.optimization.femosaa.ibea.IBEAwithKAndDRegion;
import org.ssase.objective.optimization.femosaa.moead.MOEADRegion;
import org.ssase.objective.optimization.femosaa.moead.MOEAD_STMwithKAndDRegion;
import org.ssase.objective.optimization.femosaa.nsgaii.NSGAIIwithKAndDRegion;
import org.ssase.objective.optimization.gp.GPRegion;
import org.ssase.objective.optimization.hc.HCRegion;
import org.ssase.objective.optimization.rs.RSRegion;
import org.ssase.objective.optimization.sga.SGARegion;
import org.ssase.primitive.ControlPrimitive;
import org.ssase.primitive.Primitive;
import org.ssase.region.OptimizationType;
import org.ssase.region.Region;
import org.ssase.util.Repository;
import org.ssase.util.Ssascaling;
/**
*
*
*/
public class Simulator
{
static List<Objective> o = new ArrayList<Objective>();
static List<ControlPrimitive> cp = null;
//static List<Double> overall = new ArrayList<Double>();
public static String alg = "hc";
public static double[] weights;
public static void main( String[] args )
{
setup();
main_test();
}
public static void setup() {
Ssascaling.activate();
Parser.main(null);
EAConfigure.getInstance().setupFLASHConfiguration();
FixedSeeder.getInstance().setSeeds(Parser.getSeeds());
//System.out.print(EAConfigure.getInstance().generation + "*********\n");
// List<WSAbstractService> as = workflow.all;
// List<WSConcreteService> exist = new ArrayList<ConcreteService>();
// for (AbstractService a : as) {
// exist.addAll(a.getOption());
// }
cp = new ArrayList<ControlPrimitive>();
Set<ControlPrimitive> set = new HashSet<ControlPrimitive>();
for (Service s : Repository.getAllServices()) {
for (Primitive p : s.getPossiblePrimitives()) {
if (p instanceof ControlPrimitive) {
set.add((ControlPrimitive) p);
}
}
}
cp.addAll(set);
Collections.sort(cp, new Comparator() {
public int compare(Object arg0, Object arg1) {
ControlPrimitive cp1 = (ControlPrimitive) arg0;
ControlPrimitive cp2 = (ControlPrimitive) arg1;
int in1 = VariableOrder.getList().indexOf(cp1.getName());
int in2 = VariableOrder.getList().indexOf(cp2.getName());
//System.out.print(value1 + "-----------:------------" + value2 + "\n");
return in1 < in2 ? -1 : 1;
}
});
// Assume all objectives have the same order and inputs
for (ControlPrimitive p : cp) {
System.out.print("*****" + p.getName() + "\n");
}
// Region.selected = OptimizationType.FEMOSAA01 ;
Ssascaling.loadFeatureModel(cp);
// compact(cp, "CS1", 0);
// compact(cp, "CS2", 1);
// compact(cp, "CS3", 2);
// compact(cp, "CS4", 3);
// compact(cp, "CS5", 4);
// if(1==1)
// return;
BenchmarkDelegate qos0 = new BenchmarkDelegate(0);
BenchmarkDelegate qos1 = new BenchmarkDelegate(1);
//BenchmarkDelegate qos1 = new WSSOADelegate(1, workflow);
//BenchmarkDelegate qos2 = new WSSOADelegate(2, workflow);
Set<Objective> obj = Repository.getAllObjectives();
// for (Objective ob : obj) {
//
// for (String s : remove_strings) {
// if(s.equals(ob.getName())) {
// obj.remove(ob);
// }
// }
//
// }
//
for (Objective ob : obj) {
if ("sas-rubis_software-P1".equals(ob.getName())) {
o.add(ob);
}
}
for (Objective ob : obj) {
if ("sas-rubis_software-P2".equals(ob.getName())) {
o.add(ob);
}
}
// for (Objective ob : obj) {
// if ("sas-rubis_software-Throughput".equals(ob.getName())) {
// o.add(ob);
// }
// }
//
// for (Objective ob : obj) {
// if ("sas-rubis_software-Cost".equals(ob.getName())) {
// o.add(ob);
// }
// }
for (Objective ob : o) {
QualityOfService qos = (QualityOfService) ob;
if (qos.getName().equals("sas-rubis_software-P1")) {
qos.setDelegate(qos0);
} else if (qos.getName().equals("sas-rubis_software-P2")) {
qos.setDelegate(qos1);
}
// else if (qos.getName().equals("sas-rubis_software-Throughput")) {
// qos.setDelegate(qos1);
// } else {
// qos.setDelegate(qos2);
// }
}
}
public static void main_test() {
//Repository.initUniformWeight("W3D_105.dat", 105);
//int max_number_of_eval_to_have_only_seed = 0;
long time = 0;
int n = 100;//30
for (int i = 0; i < n;/*1*/ i++) {
long t = System.currentTimeMillis();
//org.femosaa.core.SASSolution.putDependencyChainBack();
// preRunAOOrSOSeed();
if(alg.equals("ga")) {
GA(weights);
} else if(alg.equals("nsgaii")) {
NSGAII();
} else if(alg.equals("ibea")) {
IBEA();
} else if(alg.equals("moead")) {
MOEAD();
}
//testGA();
//testHC();
//testRS();
// if(1==1) return;
//
time += System.currentTimeMillis() - t;
}
//for (Double d : overall) {
// System.out.print("("+d + ")\n");
//}
}
private static void GA(double[] weights) {
double[] r = null;
Region.selected = OptimizationType.SGA;
System.out
.print("=============== SGARegion ===============\n");
SGARegion moead = new SGARegion(weights);
moead.addObjectives(o);
long time = System.currentTimeMillis();
LinkedHashMap<ControlPrimitive, Double> result = moead.optimize();
//BenchmarkDelegate qos0 = new BenchmarkDelegate();
double[] x = new double[result.size()];
int i = 0;
for (Entry<ControlPrimitive, Double> e : result.entrySet()) {
x[i] = e.getValue();
i++;
}
//overall.add(qos0.predict(x)/100);
// r = getFitness(moead.optimize());
org.ssase.util.Logger.logOptimizationTime(null,
String.valueOf((System.currentTimeMillis() - time)));
// logData("sas", "Throughput", String.valueOf(r[0]));
// logData("sas", "Cost", String.valueOf(r[1]));
}
private static void NSGAII() {
double[] r = null;
Region.selected = OptimizationType.NSGAII;
System.out
.print("=============== NSGAIIRegion ===============\n");
NSGAIIwithKAndDRegion moead = new NSGAIIwithKAndDRegion();
moead.addObjectives(o);
long time = System.currentTimeMillis();
LinkedHashMap<ControlPrimitive, Double> result = moead.optimize();
//BenchmarkDelegate qos0 = new BenchmarkDelegate();
double[] x = new double[result.size()];
int i = 0;
for (Entry<ControlPrimitive, Double> e : result.entrySet()) {
x[i] = e.getValue();
i++;
}
//overall.add(qos0.predict(x)/100);
// r = getFitness(moead.optimize());
org.ssase.util.Logger.logOptimizationTime(null,
String.valueOf((System.currentTimeMillis() - time)));
// logData("sas", "Throughput", String.valueOf(r[0]));
// logData("sas", "Cost", String.valueOf(r[1]));
}
private static void IBEA() {
double[] r = null;
Region.selected = OptimizationType.IBEA;
System.out
.print("=============== IBEARegion ===============\n");
IBEAwithKAndDRegion moead = new IBEAwithKAndDRegion();
moead.addObjectives(o);
long time = System.currentTimeMillis();
LinkedHashMap<ControlPrimitive, Double> result = moead.optimize();
//BenchmarkDelegate qos0 = new BenchmarkDelegate();
double[] x = new double[result.size()];
int i = 0;
for (Entry<ControlPrimitive, Double> e : result.entrySet()) {
x[i] = e.getValue();
i++;
}
//overall.add(qos0.predict(x)/100);
// r = getFitness(moead.optimize());
org.ssase.util.Logger.logOptimizationTime(null,
String.valueOf((System.currentTimeMillis() - time)));
// logData("sas", "Throughput", String.valueOf(r[0]));
// logData("sas", "Cost", String.valueOf(r[1]));
}
private static void MOEAD() {
double[] r = null;
//Region.selected = OptimizationType.FEMOSAA;
Region.selected = OptimizationType.MOEADkd;
System.out
.print("=============== MOEADRegion ===============\n");
//MOEAD_STMwithKAndDRegion moead = new MOEAD_STMwithKAndDRegion();
MOEADRegion moead = new MOEADRegion();
moead.addObjectives(o);
long time = System.currentTimeMillis();
LinkedHashMap<ControlPrimitive, Double> result = moead.optimize();
//BenchmarkDelegate qos0 = new BenchmarkDelegate();
double[] x = new double[result.size()];
int i = 0;
for (Entry<ControlPrimitive, Double> e : result.entrySet()) {
x[i] = e.getValue();
i++;
}
//overall.add(qos0.predict(x)/100);
// r = getFitness(moead.optimize());
org.ssase.util.Logger.logOptimizationTime(null,
String.valueOf((System.currentTimeMillis() - time)));
// logData("sas", "Throughput", String.valueOf(r[0]));
// logData("sas", "Cost", String.valueOf(r[1]));
}
}
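/*
 * Illustrative note (not part of the original file): the optimiser is chosen
 * through the public static field `alg` ("ga", "nsgaii", "ibea" or "moead");
 * with the default value "hc" the loop in main_test() matches none of the
 * branches, so callers are assumed to set, e.g., Simulator.alg = "nsgaii"
 * (and, for "ga", the `weights` array) before setup() and main_test() run.
 */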
/*
*
* */
| 9,067 | 26.149701 | 79 |
java
|
null |
aspiration-study-main/code/org/sas/benchmark/req/spo/VariableOrder.java
|
package org.sas.benchmark.req.spo;
import java.util.ArrayList;
import java.util.List;
public class VariableOrder {
	private static List<String> SS_N = new ArrayList<String>();/*difficult*/
	private static List<String> SS_O = new ArrayList<String>();/*difficult*/
private static List<String> SS_M = new ArrayList<String>();/*p=100,g=100*,s*/
private static List<String> SS_K = new ArrayList<String>();/*p=50,g=30,s*/
private static List<String> SS_J = new ArrayList<String>();/*p=50,g=30,s*/
private static List<String> SS_L = new ArrayList<String>();/*p=20,g=25,s*/
private static List<String> SS_I = new ArrayList<String>();/*p=20,g=25,s*/
private static List<String> SS_A = new ArrayList<String>();/*p=20,g=25,s*/
private static List<String> SS_C = new ArrayList<String>();/*p=20,g=25,s*/
private static List<String> SS_E = new ArrayList<String>();/*p=20,g=15,s*/
private static List<String> CONEX = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> DiatomSizeReduction = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> Adiac = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> Coffee = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> ShapesAll = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> Wafer = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> xgboost4096 = new ArrayList<String>();/*p=100,g=100,s*/
private static List<String> feature6 = new ArrayList<String>();/*p=50,g=20,s*/
private static List<String> feature7 = new ArrayList<String>();/*p=50,g=20,s*/
private static List<String> feature8 = new ArrayList<String>();/*p=50,g=20,s*/
private static List<String> feature9 = new ArrayList<String>();/*p=50,g=20,s*/
private static List<String> LSTM = new ArrayList<String>();/*p=100,g=100,s*/
static {
// The excluded ones are still here but they do not affect the order
String[] array = new String[]{
"no_mbtree",
"no_asm",
"no_cabac",
"no_scenecut",
"aq_strength",
"bframes",
"qcomp",
"qp",
"ref",
"rc_lookahead",
"b_bias",
"threads",
"keyint",
"crf",
"scenecut",
"seek",
"ipratio"
};
attach(SS_N, array);
array = new String[]{
"extrema",
"enabledOptimizations",
"disabledOptimizations",
"ls",
"dcr",
"cf",
"lir",
"inl",
"lur",
"wlur",
"prfunr",
"lus",
"cse",
"dfr",
"wlt",
"wlf",
"awlf",
"ivecyc",
"ive",
"ivesli",
"wlflt",
"ae",
"dl",
"rco",
"uip",
"dr",
"ipc",
"wlpg",
"cp",
"vp",
"srf",
"phm",
"dpa",
"msca",
"wls",
"as",
"wlsimp",
"cwle",
"lro",
"lao",
"pra",
"rnb",
"rip",
"sde",
"wlprop",
"saa",
"cyc",
"scyc",
"saacyc",
"wlsd",
"cts",
"ucts",
"maxoptcyc",
"maxlur",
"maxwlur",
"maxprfur",
"maxae",
"initmheap",
"initwheap"
};
attach(SS_O, array);
array = new String[]{
"F",
"smoother",
"colorGS",
"relaxParameter",
"V",
"Jacobi",
"line",
"zebraLine",
"cycle",
"alpha",
"beta",
"preSmoothing",
"postSmoothing"
};
attach(SS_M, array);
array = new String[]{
"spouts",
"max_spout",
"spout_wait",
"spliters",
"counters",
"netty_min_wait"
};
attach(SS_K, array);
array = new String[]{
"spouts",
"max_spout",
"sorters",
"emit_freq",
"chunk_size",
"message_size"
};
attach(SS_J, array);
array = new String[]{
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k"
};
attach(SS_L, array);
array = new String[]{
"spout",
"split",
"count",
"buffer-size",
"heap"
};
attach(SS_I, array);
array = new String[]{
"spout_wait",
"spliters",
"counters"
};
attach(SS_A, array);
array = new String[]{
"spout_wait",
"spliters",
"counters"
};
attach(SS_C, array);
array = new String[]{
"max_spout",
"spliters",
"counters"
};
attach(SS_E, array);
array = new String[]{
"mapreduce.job.max.split.locations",
"mapreduce.job.running.map.limit",
"yarn.scheduler.minimum-allocation-vcores",
"yarn.scheduler.minimum-allocation-mb",
"mapreduce.job.jvm.numtasks",
"mapreduce.input.fileinputformat.split.minsize",
"yarn.scheduler.maximum-allocation-mb",
"io.map.index.skip",
"yarn.nodemanager.windows-container.memory-limit.enabled",
"mapreduce.job.ubertask.enable",
"mapreduce.job.speculative.retry-after-no-speculate",
"mapreduce.input.lineinputformat.linespermap",
"mapreduce.job.reduce.slowstart.completedmaps",
"yarn.resourcemanager.scheduler.client.thread-count",
"yarn.resourcemanager.client.thread-count",
"dfs.replication",
"io.seqfile.sorter.recordlimit",
"mapreduce.job.running.reduce.limit",
"yarn.scheduler.maximum-allocation-vcores",
"yarn.resourcemanager.resource-tracker.client.thread-count",
"mapreduce.ifile.readahead",
"yarn.nodemanager.windows-container.cpu-limit.enabled",
"yarn.sharedcache.enabled",
"yarn.sharedcache.client-server.thread-count",
"io.seqfile.compress.blocksize",
"mapreduce.job.speculative.minimum-allowed-tasks",
"mapreduce.ifile.readahead.bytes",
"yarn.resourcemanager.amlauncher.thread-count",
"io.map.index.interval",
"yarn.sharedcache.admin.thread-count",
"yarn.resourcemanager.admin.client.thread-count"
};
attach(CONEX, array);
array = new String[]{
"vm_type",
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l"
};
attach(DiatomSizeReduction, array);
array = new String[]{
"vm_type",
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l"
};
attach(Adiac, array);
array = new String[]{
"vm_type",
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l"
};
attach(Coffee, array);
array = new String[]{
"vm_type",
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l"
};
attach(ShapesAll, array);
attach(Wafer, array);
array = new String[]{
"vm_type",
"min_child_weight",
"nthread",
"n_estimators",
"max_depth",
"learning_rate",
"max_delta_step",
"subsample",
"colsample_bytree",
"lambda",
"alpha",
"scale_pos_weight",
"colsample_bylevel"
};
attach(xgboost4096, array);
array = new String[]{
"topology.workers",
"component.bolt_num",
"topology.acker.executors",
"message.size",
"component.spout_num",
"topology.serialized.message.size.metrics",
"topology.max.spout.pending",
"storm.messaging.netty.min_wait_ms",
"topology.transfer.buffer.size",
"storm.messaging.netty.max_wait_ms",
"topology.level",
"topology.priority"
};
attach(feature6, array);
attach(feature7, array);
attach(feature8, array);
attach(feature9, array);
array = new String[]{
"a","b","c","d","e","f","size","arch","link"
};
attach(LSTM, array);
}
public static List<String> getList(){
if("SS-N".equals(Parser.selected)) {
return SS_N;
} else if("SS-O".equals(Parser.selected)) {
return SS_O;
} else if("SS-M".equals(Parser.selected)) {
return SS_M;
} else if("SS-K".equals(Parser.selected)) {
return SS_K;
} else if("SS-J".equals(Parser.selected)) {
return SS_J;
} else if("SS-L".equals(Parser.selected)) {
return SS_L;
} else if("SS-I".equals(Parser.selected)) {
return SS_I;
} else if("SS-A".equals(Parser.selected)) {
return SS_A;
} else if("SS-C".equals(Parser.selected)) {
return SS_C;
} else if("SS-E".equals(Parser.selected)) {
return SS_E;
} else if("CONEX".equals(Parser.selected)) {
return CONEX;
} else if("DiatomSizeReduction".equals(Parser.selected)) {
return DiatomSizeReduction;
} else if("Adiac".equals(Parser.selected)) {
return Adiac;
} else if("Coffee".equals(Parser.selected)) {
return Coffee;
} else if("ShapesAll".equals(Parser.selected)) {
return ShapesAll;
} else if("Wafer".equals(Parser.selected)) {
return Wafer;
} else if("xgboost4096".equals(Parser.selected)) {
return xgboost4096;
} else if("feature6".equals(Parser.selected)) {
return feature6;
} else if("feature7".equals(Parser.selected)) {
return feature7;
} else if("feature8".equals(Parser.selected)) {
return feature8;
} else if("feature9".equals(Parser.selected)) {
return feature9;
} else if("LSTM".equals(Parser.selected)) {
return LSTM;
}
return null;
}
private static void attach(List<String> list, String[] array){
for (int i = 0; i < array.length; i++) {
list.add(array[i]);
}
}
public static void main(String[] arg) {
for (int i = 0; i < SS_N.size(); i++) {
//System.out.print(" <feature name=\""+X264.get(i)+"\" type=\"categorical\" optional=\"true\"/>\n");
System.out.print("<item name=\""+SS_N.get(i)+"\" provision=\"0\" constraint=\"-1\" differences=\"1\" pre_to_max=\"0.7\" pre_of_max=\"0.1\" min=\"0\" max=\"1\" price_per_unit=\"0.5\" />\n");
}
}
}
| 9,730 | 20.720982 | 193 |
java
|
null |
Causal-Learner-main/Code/common/Feast/FEAST/java/src/main/java/craigacp/feast/Dataset.java
|
/*
** Dataset.java
**
** Initial Version - 07/01/2017
** Author - Adam Pocock
**
** Part of the FEAture Selection Toolbox (FEAST), please reference
** "Conditional Likelihood Maximisation: A Unifying Framework for Information
** Theoretic Feature Selection"
** G. Brown, A. Pocock, M.-J. Zhao, M. Lujan
** Journal of Machine Learning Research (JMLR), 2012
**
** Please check www.github.com/Craigacp/FEAST for updates.
**
** Copyright (c) 2010-2017, A. Pocock, G. Brown, The University of Manchester
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification,
** are permitted provided that the following conditions are met:
**
** - Redistributions of source code must retain the above copyright notice, this
** list of conditions and the following disclaimer.
** - Redistributions in binary form must reproduce the above copyright notice,
** this list of conditions and the following disclaimer in the documentation
** and/or other materials provided with the distribution.
** - Neither the name of The University of Manchester nor the names of its
** contributors may be used to endorse or promote products derived from this
** software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
** ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
** ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
*/
package craigacp.feast;
/**
*
* @author craigacp
*/
public class Dataset {
public final int[] labels;
public final int[][] data;
public Dataset(int[] labels, int[][] data) {
this.labels = labels;
this.data = data;
}
}
| 2,331 | 38.525424 | 83 |
java
|
null |
Causal-Learner-main/Code/common/Feast/FEAST/java/src/main/java/craigacp/feast/FEAST.java
|
/*
** FEAST.java
**
** Initial Version - 07/01/2017
** Author - Adam Pocock
**
** Part of the FEAture Selection Toolbox (FEAST), please reference
** "Conditional Likelihood Maximisation: A Unifying Framework for Information
** Theoretic Feature Selection"
** G. Brown, A. Pocock, M.-J. Zhao, M. Lujan
** Journal of Machine Learning Research (JMLR), 2012
**
** Please check www.github.com/Craigacp/FEAST for updates.
**
** Copyright (c) 2010-2017, A. Pocock, G. Brown, The University of Manchester
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification,
** are permitted provided that the following conditions are met:
**
** - Redistributions of source code must retain the above copyright notice, this
** list of conditions and the following disclaimer.
** - Redistributions in binary form must reproduce the above copyright notice,
** this list of conditions and the following disclaimer in the documentation
** and/or other materials provided with the distribution.
** - Neither the name of The University of Manchester nor the names of its
** contributors may be used to endorse or promote products derived from this
** software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
** ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
** ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
*/
package craigacp.feast;
import java.util.List;
/**
* The accessor class for FEAST algorithms.
*
* Flag mapping:
* 1 = CMIM
* 2 = CondMI
* 3 = DISR
* 4 = ICAP
* 5 = JMI
* 6 = MIM
* 7 = mRMR
*
* @author craigacp
*/
public abstract class FEAST {
private FEAST() {}
static {
System.loadLibrary("feast-java");
}
public static native ScoredFeatures BetaGamma(int numFeatures, int[][] dataMatrix, int[] labels, double beta, double gamma);
public static <T> ScoredFeatures BetaGamma(int numFeatures, List<List<T>> dataMatrix, List<T> labels, double beta, double gamma) {
int[][] data = FEASTUtil.convertMatrix(dataMatrix);
int[] lab = FEASTUtil.convertList(labels);
return BetaGamma(numFeatures,data,lab,beta,gamma);
}
public static ScoredFeatures CMIM(int numFeatures, int[][] dataMatrix, int[] labels) {
return feast(1,numFeatures,dataMatrix,labels);
}
public static <T> ScoredFeatures CMIM(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(1,numFeatures,dataMatrix,labels);
}
private static ScoredFeatures condMIFixup(ScoredFeatures f) {
int maxVal = f.featureIndices.length;
for (int i = 0; i < f.featureIndices.length; i++) {
if (f.featureIndices[i] == -1) {
maxVal = i;
break;
}
}
int[] newIndices = new int[maxVal];
double[] newScores = new double[maxVal];
for (int i = 0; i < maxVal; i++) {
newIndices[i] = f.featureIndices[i];
newScores[i] = f.featureScores[i];
}
return new ScoredFeatures(newIndices,newScores);
}
public static ScoredFeatures CondMI(int numFeatures, int[][] dataMatrix, int[] labels) {
return condMIFixup(feast(2,numFeatures,dataMatrix,labels));
}
public static <T> ScoredFeatures CondMI(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return condMIFixup(feast(2,numFeatures,dataMatrix,labels));
}
public static ScoredFeatures DISR(int numFeatures, int[][] dataMatrix, int[] labels) {
return feast(3,numFeatures,dataMatrix,labels);
}
public static <T> ScoredFeatures DISR(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(3,numFeatures,dataMatrix,labels);
}
public static ScoredFeatures ICAP(int numFeatures, int[][] dataMatrix, int[] labels) {
return feast(4,numFeatures,dataMatrix,labels);
}
public static <T> ScoredFeatures ICAP(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(4,numFeatures,dataMatrix,labels);
}
public static ScoredFeatures JMI(int numFeatures, int[][] dataMatrix, int[] labels) {
return feast(5,numFeatures,dataMatrix,labels);
}
public static <T> ScoredFeatures JMI(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(5,numFeatures,dataMatrix,labels);
}
public static ScoredFeatures MIM(int numFeatures, int[][] dataMatrix, int[] labels) {
return feast(6,numFeatures,dataMatrix,labels);
}
public static <T> ScoredFeatures MIM(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(6,numFeatures,dataMatrix,labels);
}
public static ScoredFeatures mRMR(int numFeatures, int[][] dataMatrix, int[] labels) {
return feast(7,numFeatures,dataMatrix,labels);
}
public static <T> ScoredFeatures mRMR(int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(7,numFeatures,dataMatrix,labels);
}
private static <T> ScoredFeatures feast(int flag, int numFeatures, List<List<T>> dataMatrix, List<T> labels) {
return feast(flag,numFeatures,FEASTUtil.convertMatrix(dataMatrix),FEASTUtil.convertList(labels));
}
private static native ScoredFeatures feast(int flag, int numFeatures, int[][] dataMatrix, int[] labels);
}
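// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original FEAST distribution). It
// assumes the "feast-java" native library is available on java.library.path;
// the tiny feature-major data matrix below is made up purely for demonstration.
class FEASTUsageExample {

    public static void main(String[] args) {
        // Three discretised features (rows) observed over six samples (columns).
        int[][] data = {
            {0, 1, 0, 1, 0, 1},
            {1, 1, 0, 0, 1, 0},
            {0, 0, 1, 1, 1, 1}
        };
        int[] labels = {0, 1, 0, 1, 0, 1};

        // Rank the top two features with JMI (flag 5 in the native call).
        ScoredFeatures result = FEAST.JMI(2, data, labels);
        for (int i = 0; i < result.featureIndices.length; i++) {
            System.out.println(result.featureIndices[i] + " -> " + result.featureScores[i]);
        }
    }
}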
| 6,081 | 38.23871 | 134 |
java
|
null |
Causal-Learner-main/Code/common/Feast/FEAST/java/src/main/java/craigacp/feast/FEASTUtil.java
|
/*
** FEASTUtil.java
**
** Initial Version - 07/01/2017
** Author - Adam Pocock
**
** Part of the FEAture Selection Toolbox (FEAST), please reference
** "Conditional Likelihood Maximisation: A Unifying Framework for Information
** Theoretic Feature Selection"
** G. Brown, A. Pocock, M.-J. Zhao, M. Lujan
** Journal of Machine Learning Research (JMLR), 2012
**
** Please check www.github.com/Craigacp/FEAST for updates.
**
** Copyright (c) 2010-2017, A. Pocock, G. Brown, The University of Manchester
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification,
** are permitted provided that the following conditions are met:
**
** - Redistributions of source code must retain the above copyright notice, this
** list of conditions and the following disclaimer.
** - Redistributions in binary form must reproduce the above copyright notice,
** this list of conditions and the following disclaimer in the documentation
** and/or other materials provided with the distribution.
** - Neither the name of The University of Manchester nor the names of its
** contributors may be used to endorse or promote products derived from this
** software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
** ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
** ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
*/
package craigacp.feast;
import java.util.HashMap;
import java.util.List;
/**
* Util class for FEAST.
*
* @author craigacp
*/
public abstract class FEASTUtil {
private FEASTUtil() {
}
public static int[] convertArray(double[] input) {
int[] output = new int[input.length];
int counter = 0;
HashMap<Double, Integer> idMapping = new HashMap<>();
int idxCounter = 0;
for (Double element : input) {
Integer idx = idMapping.getOrDefault(element, -1);
if (idx == -1) {
idx = idxCounter;
idMapping.put(element, idx);
idxCounter++;
}
output[counter] = idx;
counter++;
}
return output;
}
public static <T> int[] convertList(List<T> input) {
int[] output = new int[input.size()];
int counter = 0;
HashMap<T, Integer> idMapping = new HashMap<>();
int idxCounter = 0;
for (T element : input) {
Integer idx = idMapping.getOrDefault(element, -1);
if (idx == -1) {
idx = idxCounter;
idMapping.put(element, idx);
idxCounter++;
}
output[counter] = idx;
counter++;
}
return output;
}
public static <T> int[][] convertMatrix(List<List<T>> input) {
int[][] output = new int[input.size()][];
int i = 0;
for (List<T> element : input) {
output[i] = convertList(element);
i++;
}
return output;
}
public static int[][] convertMatrix(double[][] input) {
int[][] output = new int[input.length][];
int i = 0;
for (double[] element : input) {
output[i] = convertArray(element);
i++;
}
return output;
}
public static int[][] transpose(int[][] input) {
int[][] output = new int[input[0].length][input.length];
for (int i = 0; i < input.length; i++) {
for (int j = 0; j < input[0].length; j++) {
output[j][i] = input[i][j];
}
}
return output;
}
}
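// ---------------------------------------------------------------------------
// Illustrative note (not part of the original file): convertList/convertArray
// assign dense integer ids in order of first appearance, so a column such as
// ["a", "b", "a", "c"] maps to {0, 1, 0, 2}; convertMatrix simply applies that
// mapping independently to each inner list (feature) it is given.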
| 4,297 | 30.144928 | 83 |
java
|
null |
Causal-Learner-main/Code/common/Feast/FEAST/java/src/main/java/craigacp/feast/ScoredFeatures.java
|
/*
** ScoredFeatures.java
**
** Initial Version - 07/01/2017
** Author - Adam Pocock
**
** Part of the FEAture Selection Toolbox (FEAST), please reference
** "Conditional Likelihood Maximisation: A Unifying Framework for Information
** Theoretic Feature Selection"
** G. Brown, A. Pocock, M.-J. Zhao, M. Lujan
** Journal of Machine Learning Research (JMLR), 2012
**
** Please check www.github.com/Craigacp/FEAST for updates.
**
** Copyright (c) 2010-2017, A. Pocock, G. Brown, The University of Manchester
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification,
** are permitted provided that the following conditions are met:
**
** - Redistributions of source code must retain the above copyright notice, this
** list of conditions and the following disclaimer.
** - Redistributions in binary form must reproduce the above copyright notice,
** this list of conditions and the following disclaimer in the documentation
** and/or other materials provided with the distribution.
** - Neither the name of The University of Manchester nor the names of its
** contributors may be used to endorse or promote products derived from this
** software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
** ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
** ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
*/
package craigacp.feast;
/**
* A tuple class which contains the selected feature indices and their scores.
*
* @author craigacp
*/
public class ScoredFeatures {
public final int[] featureIndices;
public final double[] featureScores;
public ScoredFeatures(int[] featureIndices, double[] featureScores) {
this.featureIndices = featureIndices;
this.featureScores = featureScores;
}
}
| 2,497 | 40.633333 | 83 |
java
|
null |
Causal-Learner-main/Code/common/Feast/FEAST/java/src/main/java/craigacp/feast/Test.java
|
/*
** Test.java
**
** Initial Version - 07/01/2017
** Author - Adam Pocock
**
** Part of the FEAture Selection Toolbox (FEAST), please reference
** "Conditional Likelihood Maximisation: A Unifying Framework for Information
** Theoretic Feature Selection"
** G. Brown, A. Pocock, M.-J. Zhao, M. Lujan
** Journal of Machine Learning Research (JMLR), 2012
**
** Please check www.github.com/Craigacp/FEAST for updates.
**
** Copyright (c) 2010-2017, A. Pocock, G. Brown, The University of Manchester
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification,
** are permitted provided that the following conditions are met:
**
** - Redistributions of source code must retain the above copyright notice, this
** list of conditions and the following disclaimer.
** - Redistributions in binary form must reproduce the above copyright notice,
** this list of conditions and the following disclaimer in the documentation
** and/or other materials provided with the distribution.
** - Neither the name of The University of Manchester nor the names of its
** contributors may be used to endorse or promote products derived from this
** software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
** ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
** ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
*/
package craigacp.feast;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
*
* @author craigacp
*/
public class Test {
public static Dataset readDataset(String filename, int labelIndex) {
try {
List<Integer> labels = new ArrayList<>();
List<List<Integer>> data = new ArrayList<>();
BufferedReader reader = new BufferedReader(new FileReader(filename));
while (reader.ready()) {
String line = reader.readLine();
if (line != null) {
String[] splitLine = line.split(",");
labels.add(Integer.parseInt(splitLine[labelIndex]));
if (data.isEmpty()) {
for (int i = 0; i < splitLine.length-1; i++) {
data.add(new ArrayList<>());
}
}
for (int i = 0; i < splitLine.length; i++) {
if (i < labelIndex) {
data.get(i).add(Integer.parseInt(splitLine[i]));
} else if (i > labelIndex) {
data.get(i-1).add(Integer.parseInt(splitLine[i]));
}
}
}
}
int[] labelsArray = FEASTUtil.convertList(labels);
int[][] dataArray = FEASTUtil.convertMatrix(data);
System.out.println("Dataset loaded from " + filename);
System.out.println("Found " + labelsArray.length + " labels and " + dataArray.length + " features.");
return new Dataset(labelsArray,dataArray);
} catch (FileNotFoundException e) {
System.err.println("File " + filename + " not found. " + e.toString());
} catch (IOException e) {
System.err.println("IOException when reading from " + filename + ". " + e.toString());
}
return null;
}
public static String usage() {
StringBuilder buffer = new StringBuilder();
buffer.append("Test - arguments:\n");
buffer.append(" -c {CIFE,CMIM,CondMI,DISR,ICAP,JMI,MIFS,MIM,mRMR}\n");
buffer.append(" Selection criteria to use.\n");
buffer.append(" -f <filename>\n");
buffer.append(" CSV file to read.\n");
buffer.append(" -l <int>\n");
buffer.append(" Index of the label in the csv.\n");
buffer.append(" -n <int>\n");
buffer.append(" Number of features to select. Defaults to a full ranking.\n");
buffer.append(" -w \n");
buffer.append(" Test weighted variant with all weights = 1.\n");
return buffer.toString();
}
public static void main(String[] args) {
String filename = null;
String fsMethod = null;
int numFeatures = -1;
int labelIndex = -1;
boolean weighted = false;
double[] weights;
for (int i = 0; i < args.length; i++) {
switch (args[i]) {
case "-c":
if ((i + 1) < args.length) {
i++;
fsMethod = args[i];
} else {
System.err.println("Please supply an argument to -c");
System.err.println(usage());
return;
}
break;
case "-f":
if ((i + 1) < args.length) {
i++;
filename = args[i];
} else {
System.err.println("Please supply an argument to -f");
System.err.println(usage());
return;
}
break;
case "-l":
if ((i + 1) < args.length) {
i++;
try {
labelIndex = Integer.parseInt(args[i]);
} catch (NumberFormatException e) {
System.err.println(args[i] + " is not a valid integer");
System.err.println(usage());
return;
}
} else {
System.err.println("Please supply an argument to -l");
System.err.println(usage());
return;
}
break;
case "-n":
if ((i + 1) < args.length) {
i++;
try {
numFeatures = Integer.parseInt(args[i]);
} catch (NumberFormatException e) {
System.err.println(args[i] + " is not a valid integer");
System.err.println(usage());
return;
}
} else {
System.err.println("Please supply an argument to -n");
System.err.println(usage());
return;
}
break;
case "-w":
weighted = true;
break;
default:
System.err.println("Unknown argument " + args[i]);
System.err.println(usage());
return;
}
}
Dataset dataset = null;
if ((filename != null) && (labelIndex != -1)) {
dataset = readDataset(filename, labelIndex);
} else {
System.err.println("Please supply a filename and a label index.");
System.err.println(usage());
return;
}
if ((dataset != null) && (fsMethod != null)) {
if (numFeatures == -1) {
numFeatures = dataset.data.length;
System.out.println("Setting numFeatures to " + numFeatures);
}
ScoredFeatures output = null;
if (weighted) {
weights = new double[dataset.labels.length];
for (int i = 0; i < weights.length; i++) {
weights[i] = 1.0;
}
//{CMIM,CondMI,DISR,JMI,MIM}
switch (fsMethod) {
case "cmim":
case "CMIM":
System.out.println("Using weighted CMIM");
output = WeightedFEAST.CMIM(numFeatures, dataset.data, dataset.labels, weights);
break;
case "condmi":
case "CondMI":
System.out.println("Using weighted CondMI");
output = WeightedFEAST.CondMI(numFeatures, dataset.data, dataset.labels, weights);
break;
case "disr":
case "DISR":
System.out.println("Using weighted DISR");
output = WeightedFEAST.DISR(numFeatures, dataset.data, dataset.labels, weights);
break;
case "jmi":
case "JMI":
System.out.println("Using weighted JMI");
output = WeightedFEAST.JMI(numFeatures, dataset.data, dataset.labels, weights);
break;
case "mim":
case "MIM":
System.out.println("Using weighted MIM");
output = WeightedFEAST.MIM(numFeatures, dataset.data, dataset.labels, weights);
break;
default:
System.err.println("Unknown weighted fs method " + fsMethod);
System.err.println(usage());
return;
}
} else {
//{CIFE,CMIM,CondMI,DISR,ICAP,JMI,MIFS,MIM,mRMR}
switch (fsMethod) {
case "cife":
case "CIFE":
case "fou":
case "FOU":
System.out.println("Using CIFE/FOU");
output = FEAST.BetaGamma(numFeatures, dataset.data, dataset.labels, 1.0, 1.0);
break;
case "cmim":
case "CMIM":
System.out.println("Using CMIM");
output = FEAST.CMIM(numFeatures, dataset.data, dataset.labels);
break;
case "condmi":
case "CondMI":
System.out.println("Using CondMI");
output = FEAST.CondMI(numFeatures, dataset.data, dataset.labels);
break;
case "disr":
case "DISR":
System.out.println("Using DISR");
output = FEAST.DISR(numFeatures, dataset.data, dataset.labels);
break;
case "icap":
case "ICAP":
System.out.println("Using ICAP");
output = FEAST.ICAP(numFeatures, dataset.data, dataset.labels);
break;
case "jmi":
case "JMI":
System.out.println("Using JMI");
output = FEAST.JMI(numFeatures, dataset.data, dataset.labels);
break;
case "mifs":
case "MIFS":
System.out.println("Using MIFS");
output = FEAST.BetaGamma(numFeatures, dataset.data, dataset.labels, 1.0, 0.0);
break;
case "mim":
case "MIM":
System.out.println("Using MIM");
output = FEAST.MIM(numFeatures, dataset.data, dataset.labels);
break;
case "mrmr":
case "mRMR":
System.out.println("Using mRMR");
output = FEAST.mRMR(numFeatures, dataset.data, dataset.labels);
break;
default:
System.err.println("Unknown fs method " + fsMethod);
System.err.println(usage());
return;
}
}
if (output != null) {
System.out.println("Selected features (index,score):");
for (int i = 0; i < output.featureIndices.length; i++) {
System.out.println("\t" + output.featureIndices[i] + ",\t" + output.featureScores[i]);
}
} else {
System.err.println("Feature selection failed");
}
} else {
System.err.println("Please select a fs method, and load a valid dataset");
System.err.println(usage());
return;
}
}
}
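// ---------------------------------------------------------------------------
// Illustrative invocation (not part of the original file; the classpath, library
// path and CSV name are placeholders). Assuming the native feast-java library is
// on java.library.path and data.csv is a discretised CSV whose first column holds
// the class label,
//
//   java -cp <classes> -Djava.library.path=<libs> craigacp.feast.Test -f data.csv -l 0 -c JMI -n 5
//
// loads the file, ranks the top five features with JMI and prints each selected
// feature index together with its score.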
| 13,379 | 41.884615 | 113 |
java
|
null |
Causal-Learner-main/Code/common/Feast/FEAST/java/src/main/java/craigacp/feast/WeightedFEAST.java
|
/*
** WeightedFEAST.java
**
** Initial Version - 07/01/2017
** Author - Adam Pocock
**
** Part of the Feature Selection Toolbox, please reference
** "Information Theoretic Feature Selection for Cost-Sensitive Problems"
** A. Pocock, N. Edakunni, M.-J. Zhao, M. Lujan, G. Brown
** ArXiv, 2017
**
** Please check www.github.com/Craigacp/FEAST for updates.
**
** Copyright (c) 2010-2017, A. Pocock, G. Brown, The University of Manchester
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification,
** are permitted provided that the following conditions are met:
**
** - Redistributions of source code must retain the above copyright notice, this
** list of conditions and the following disclaimer.
** - Redistributions in binary form must reproduce the above copyright notice,
** this list of conditions and the following disclaimer in the documentation
** and/or other materials provided with the distribution.
** - Neither the name of The University of Manchester nor the names of its
** contributors may be used to endorse or promote products derived from this
** software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
** ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
** WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
** ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
** ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
** SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
*/
package craigacp.feast;
import java.util.List;
/**
* The accessor class for Weighted FEAST algorithms.
*
* Flag mapping:
* 1 = CMIM
* 2 = CondMI
* 3 = DISR
* 4 = JMI
* 5 = MIM
*
* @author craigacp
*/
public abstract class WeightedFEAST {
private WeightedFEAST() {}
static {
System.loadLibrary("feast-java");
}
public static ScoredFeatures CMIM(int numFeatures, int[][] dataMatrix, int[] labels, double[] weights) {
return weightedFeast(1,numFeatures,dataMatrix,labels,weights);
}
public static <T> ScoredFeatures CMIM(int numFeatures, List<List<T>> dataMatrix, List<T> labels, double[] weights) {
return weightedFeast(1,numFeatures,dataMatrix,labels,weights);
}
private static ScoredFeatures condMIFixup(ScoredFeatures f) {
int maxVal = f.featureIndices.length;
for (int i = 0; i < f.featureIndices.length; i++) {
if (f.featureIndices[i] == -1) {
maxVal = i;
break;
}
}
int[] newIndices = new int[maxVal];
double[] newScores = new double[maxVal];
for (int i = 0; i < maxVal; i++) {
newIndices[i] = f.featureIndices[i];
newScores[i] = f.featureScores[i];
}
return new ScoredFeatures(newIndices,newScores);
}
public static ScoredFeatures CondMI(int numFeatures, int[][] dataMatrix, int[] labels, double[] weights) {
return condMIFixup(weightedFeast(2,numFeatures,dataMatrix,labels,weights));
}
public static <T> ScoredFeatures CondMI(int numFeatures, List<List<T>> dataMatrix, List<T> labels, double[] weights) {
return condMIFixup(weightedFeast(2,numFeatures,dataMatrix,labels,weights));
}
public static ScoredFeatures DISR(int numFeatures, int[][] dataMatrix, int[] labels, double[] weights) {
return weightedFeast(3,numFeatures,dataMatrix,labels,weights);
}
public static <T> ScoredFeatures DISR(int numFeatures, List<List<T>> dataMatrix, List<T> labels, double[] weights) {
return weightedFeast(3,numFeatures,dataMatrix,labels,weights);
}
public static ScoredFeatures JMI(int numFeatures, int[][] dataMatrix, int[] labels, double[] weights) {
return weightedFeast(4,numFeatures,dataMatrix,labels,weights);
}
public static <T> ScoredFeatures JMI(int numFeatures, List<List<T>> dataMatrix, List<T> labels, double[] weights) {
return weightedFeast(4,numFeatures,dataMatrix,labels,weights);
}
public static ScoredFeatures MIM(int numFeatures, int[][] dataMatrix, int[] labels, double[] weights) {
return weightedFeast(5,numFeatures,dataMatrix,labels,weights);
}
public static <T> ScoredFeatures MIM(int numFeatures, List<List<T>> dataMatrix, List<T> labels, double[] weights) {
return weightedFeast(5,numFeatures,dataMatrix,labels,weights);
}
private static <T> ScoredFeatures weightedFeast(int flag, int numFeatures, List<List<T>> dataMatrix, List<T> labels, double[] weights) {
return weightedFeast(flag,numFeatures,FEASTUtil.convertMatrix(dataMatrix),FEASTUtil.convertList(labels),weights);
}
private static native ScoredFeatures weightedFeast(int flag, int numFeatures, int[][] dataMatrix, int[] labels, double[] weights);
}
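// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original FEAST distribution). It
// assumes the same native "feast-java" library and feature-major data layout as
// the unweighted API; the uniform per-sample weights below are made up purely
// for demonstration.
class WeightedFEASTUsageExample {

    public static void main(String[] args) {
        int[][] data = {
            {0, 1, 0, 1, 0, 1},
            {1, 1, 0, 0, 1, 0}
        };
        int[] labels = {0, 1, 0, 1, 0, 1};

        // One weight per sample, all equal here.
        double[] weights = new double[labels.length];
        for (int i = 0; i < weights.length; i++) {
            weights[i] = 1.0;
        }

        ScoredFeatures result = WeightedFEAST.JMI(2, data, labels, weights);
        for (int i = 0; i < result.featureIndices.length; i++) {
            System.out.println(result.featureIndices[i] + " -> " + result.featureScores[i]);
        }
    }
}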
| 5,342 | 40.742188 | 140 |
java
|
SeqTrans
|
SeqTrans-master/Migration/src/collect/AccumulateGroundTruth.java
|
package collect;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.diff.RawTextComparator;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import structure.ChangePair;
public class AccumulateGroundTruth {
/**
* Collecting Ground Truth by Accumulating diffs in the same file.
* @throws Exception
*/
private static HashMap<String, ArrayList<String>> accuDiffFileMaps = new HashMap<String, ArrayList<String>>();
private static HashMap<String, Integer> accuDiffLineMaps = new HashMap<String, Integer>();
public static void main(String[] args) throws Exception {
String versionCommit="8fd0197cd3710786212a5bba1545bc9513fe74cc";//ҪCommit Hash
String path="J:\\junit4\\";//ӦĿڱRepo·
autoExtraction(versionCommit, path);
}
public static void autoExtraction(String versionCommit, String classPath) throws Exception {
FileRepositoryBuilder builder = new FileRepositoryBuilder();
builder.setMustExist(true);
builder.addCeilingDirectory(new File(classPath));
builder.findGitDir(new File(classPath));
Repository repo;
repo = builder.build();
RevWalk walk = new RevWalk(repo);
ObjectId versionId=repo.resolve(versionCommit);
RevCommit currentCommit=walk.parseCommit(versionId);
// System.out.println(currentCommit.getName());
// List<DiffEntry> diffFix=getChangedFileList(currentCommit, repo);
// System.out.println("size:"+diffFix.size());
LinkedList<RevCommit> commits = getCommitList(currentCommit, repo);
walk.close();
for(int i=0;i<commits.size()-1;i++) {
RevCommit newCommit = commits.get(i);
RevCommit oldCommit = commits.get(i+1);
filterDiffs(newCommit, oldCommit, repo);
}
System.out.println("accuSize:"+accuDiffFileMaps.size());
for(Map.Entry<String, ArrayList<String>> entry : accuDiffFileMaps.entrySet()) {
String name = entry.getKey();
ArrayList<String> arrys = entry.getValue();
System.out.println(name+","+accuDiffLineMaps.get(name));
if(arrys.size()>=2) {
String oldCommit = arrys.get(0);
System.out.println("oldCommit:"+oldCommit);
String newCommit = arrys.get(arrys.size()-1);
System.out.println("newCommit:"+newCommit);
int oldLOC = readLOC(classPath, name, oldCommit, repo);
int newLOC = readLOC(classPath, name, newCommit, repo);
System.out.println("LOC:"+oldLOC+","+newLOC);
}
}
// for(Map.Entry<String, Integer> entry : accuDiffLineMaps.entrySet()) {
// String name = entry.getKey();
// int DiffNum = entry.getValue();
// System.out.println(name+","+DiffNum);
// }
}
public static Integer readLOC(String classPath, String fileName, String commit, Repository repo) throws Exception {
String commitName = commit;
String command = "cmd.exe /C git checkout "+commitName;
CommandLine cmdLine = CommandLine.parse(command);
DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
DefaultExecutor executor = new DefaultExecutor();
		executor.setExitValue(1); // accept exit value 1: a successful run that prints nothing returns 1
		executor.setWorkingDirectory(new File(classPath));// set the working directory
executor.execute(cmdLine, resultHandler);
Thread.sleep(5000);
RevWalk walk = new RevWalk(repo);
ObjectId versionId=repo.resolve(commit);
RevCommit currentCommit=walk.parseCommit(versionId);
System.out.println(currentCommit.getName());
List<DiffEntry> changedList = getChangedFileList(currentCommit, repo);
walk.close();
if(changedList==null)
return 0;
for(DiffEntry diff : changedList) {
String newAddPath = diff.getNewPath();
String[] arrys = newAddPath.split("/");
String newAddFile = arrys[arrys.length-1];
if(newAddFile.equals(fileName)) {
String fileroot = classPath+"\\"+newAddPath;
File file = new File(fileroot);
BufferedReader br = new BufferedReader(new FileReader(file));
String tmpLine = "";
int count = 0;
while((tmpLine=br.readLine())!=null) {
if(tmpLine!=null)
count++;
}
br.close();
return count;
}
}
return 0;
}
public static ChangePair filterDiffs(RevCommit commit1, RevCommit commit2, Repository repo) throws Exception {
List<DiffEntry> returnDiffs = null;
ObjectId head = commit1.getTree().getId();
ObjectId oldHead = commit2.getTree().getId();
// System.out.println("Printing diff between the Revisions: " + commit1.getName() + " and " + commit2.getName());
// prepare two iterators to compute the diffs
try (ObjectReader reader = repo.newObjectReader()) {
CanonicalTreeParser oldTreeIter = new CanonicalTreeParser();
oldTreeIter.reset(reader, oldHead);
CanonicalTreeParser newTreeIter = new CanonicalTreeParser();
newTreeIter.reset(reader, head);
// finally get the list of changed files
try (Git git = new Git(repo)) {
List<DiffEntry> diffs= git.diff()
.setNewTree(newTreeIter)
.setOldTree(oldTreeIter)
.call();
returnDiffs=diffs;
for (DiffEntry entry : returnDiffs) {
String newAddPath = entry.getNewPath();
// System.out.println(newAddPath);
String[] arrys = newAddPath.split("/");
String newAddFile = arrys[arrys.length-1];
if(!newAddFile.contains(".java"))
continue;
ByteArrayOutputStream out = new ByteArrayOutputStream();
DiffFormatter df = new DiffFormatter(out);
df.setDiffComparator(RawTextComparator.WS_IGNORE_ALL);
df.setRepository(repo);
df.format(entry);
String diffText = out.toString("UTF-8");
// System.out.println(diffText);
String[] lines = diffText.split("\n");
int diffLineNum = 0;
for(String line : lines) {
if(line.length()<=1)
continue;
String firstChar = line.substring(0, 1);
String secondChar = line.substring(1, 2);
if(firstChar.equals("-")||firstChar.equals("+")) {
if(secondChar.equals("-")||secondChar.equals("+"))
							continue;// "---"/"+++" header lines mark files, not diff content
String diffLine = line.substring(1);
// System.out.println("line:"+diffLine);
if (diffLine!=null) {
diffLineNum++;
}
}
}
if(accuDiffLineMaps.get(newAddFile)!=null) {
diffLineNum = diffLineNum+accuDiffLineMaps.get(newAddFile);
}
accuDiffLineMaps.put(newAddFile, diffLineNum);
// System.out.println("lines:"+lines.length);
if(accuDiffFileMaps.get(newAddFile)==null) {
ArrayList<String> commits = new ArrayList<String>();
commits.add(commit1.getName());
accuDiffFileMaps.put(newAddFile, commits);
}else {
accuDiffFileMaps.get(newAddFile).add(commit1.getName());
}
df.close();
}
} catch (GitAPIException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
ChangePair cp = new ChangePair(commit1, commit2, returnDiffs);
return cp;
}
static List<DiffEntry> getChangedFileList(RevCommit revCommit, Repository repo) {
List<DiffEntry> returnDiffs = null;
try {
RevCommit previsouCommit=getPrevHash(revCommit,repo);
if(previsouCommit==null)
return null;
ObjectId head=revCommit.getTree().getId();
ObjectId oldHead=previsouCommit.getTree().getId();
System.out.println("Printing diff between the Revisions: " + revCommit.getName() + " and " + previsouCommit.getName());
// prepare the two iterators to compute the diff between
try (ObjectReader reader = repo.newObjectReader()) {
CanonicalTreeParser oldTreeIter = new CanonicalTreeParser();
oldTreeIter.reset(reader, oldHead);
CanonicalTreeParser newTreeIter = new CanonicalTreeParser();
newTreeIter.reset(reader, head);
// finally get the list of changed files
try (Git git = new Git(repo)) {
List<DiffEntry> diffs= git.diff()
.setNewTree(newTreeIter)
.setOldTree(oldTreeIter)
.call();
returnDiffs=diffs;
} catch (GitAPIException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return returnDiffs;
}
public static RevCommit getPrevHash(RevCommit commit, Repository repo) throws IOException {
try (RevWalk walk = new RevWalk(repo)) {
// Starting point
walk.markStart(commit);
int count = 0;
for (RevCommit rev : walk) {
// got the previous commit.
if (count == 1) {
return rev;
}
count++;
}
walk.dispose();
}
//Reached end and no previous commits.
return null;
}
public static LinkedList<RevCommit> getCommitList(RevCommit startCommit, Repository repo) throws Exception{
LinkedList<RevCommit> commits = new LinkedList<RevCommit>();
RevWalk walk = new RevWalk(repo);
walk.markStart(startCommit);
for(RevCommit rev : walk) {
commits.add(rev);
}
walk.close();
return commits;
}
}
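/*
 * Illustrative note (not part of the original file; the hard-coded commit and
 * "J:\junit4" path in main() are machine-specific placeholders): pointed at a
 * local clone and a starting commit, the program walks the history backwards,
 * accumulates the changed-line count per .java file name, and for every file
 * touched by at least two commits checks out the first and last recorded
 * commit to report the file's LOC at both points.
 */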
| 10,276 | 36.370909 | 122 |
java
|
SeqTrans
|
SeqTrans-master/Migration/src/collect/CollectAndroidDiff.java
|
package collect;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.diff.RawTextComparator;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import structure.ChangePair;
import utils.FileOperation;
public class CollectAndroidDiff {
/**
* Collecting API change pairs from git commit diff logs
* @throws Exception
*/
public static void main(String[] args) throws Exception {
// TODO Auto-generated method stub
String dataPath="D:\\workspace\\Pycharm\\20191222-Vulnerability-dataset\\Android_CVE.csv";//ҪCommit Hash
String rootPath="I:\\gitRepos\\";//ӦĿڱRepo·
String diskpath = "I:\\Vulnerability_commit_android_cpp\\";
autoExtraction(dataPath, rootPath, diskpath);
// getChangeList(versionCommit, path);
}
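	/*
	 * Illustrative note (not part of the original file; the paths above are
	 * machine-specific placeholders): each CSV row is expected to hold a CVE id
	 * and a commit URL. The program resolves that fixing commit and its parent
	 * in the local clone, checks each side out in turn, copies the files kept by
	 * getUsefulDiffs() into a per-pair cp<n> directory, and stores the textual
	 * diffs and commit messages alongside them.
	 */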
public static void autoExtraction(String dataPath, String rootPath, String diskpath) throws Exception {
File csvFile = new File(dataPath);
BufferedReader br = new BufferedReader(new FileReader(csvFile));
String tmpline = "";
ArrayList<String> lines = new ArrayList<String>();
while((tmpline=br.readLine())!=null) {
lines.add(tmpline);
}
br.close();
// int n = continueProcess(diskpath);
int n = 0;
for(int i=n;i<lines.size();i++) {
System.out.println("round:"+i);
String line = lines.get(i);
String[] tokens = line.split(",");
String CVE = tokens[0];
String URL = tokens[1];
String[] tokens1 = URL.split("/");
String repoName = "";
for(int j=3;j<tokens1.length-2;j++) {
repoName += tokens1[j]+"_";
}
repoName = repoName.substring(0, repoName.length()-1);
String commit = URL.split("/")[URL.split("/").length-1];
String classPath = rootPath+repoName+"\\.git";
System.out.println(classPath);
File repoDir = new File(classPath);
if (!repoDir.exists()) {
System.err.println(repoName+" not exists!");
continue;
}
FileRepositoryBuilder builder = new FileRepositoryBuilder();
builder.setMustExist(true);
// builder.addCeilingDirectory(new File(classPath));
Repository repo = builder.setGitDir(new File(classPath))
.readEnvironment()
.findGitDir()
.build();
RevWalk walk = new RevWalk(repo);
ObjectId versionId=repo.resolve(commit);
ChangePair cp = new ChangePair();
try {
				RevCommit currentCommit=walk.parseCommit(versionId); // may throw if the commit cannot be resolved
System.out.println("Commit:"+currentCommit.getName());
cp = getChangPair(currentCommit, repo);
cp.setRootPath(rootPath);
cp.setRepoName(repoName);
} catch (Exception e) {
e.printStackTrace();
n++;
continue;
}
RevCommit newCommit = cp.getNewCommit();
RevCommit oldCommit = cp.getOldCommit();
String newCommitName = newCommit.getName();
String oldCommitName = oldCommit.getName();
System.out.println("cp"+n+":"+oldCommitName+";"+newCommitName);
n = runExec(CVE, cp, repo, n, diskpath);
n++;
walk.close();
}
System.out.println("CPsize:"+n);
}
private static int runExec(String CVE, ChangePair cp, Repository repo, int n, String diskpath) throws Exception {
RevCommit newCommit = cp.getNewCommit();
RevCommit oldCommit = cp.getOldCommit();
String newCommitName = newCommit.getName();
String oldCommitName = oldCommit.getName();
String rootPath = cp.getRootPath();
String repoName = cp.getRepoName();
String classPath = rootPath+repoName+"\\";
String movePath = diskpath+"cp"+String.valueOf(n)+"\\"+newCommitName+"\\";
String line = "cmd.exe /C git checkout -f "+newCommitName;
int[] exitvalues = {0, 1};
System.out.println(line);
CommandLine cmdLine = CommandLine.parse(line);
DefaultExecutor executor = new DefaultExecutor();
ExecuteWatchdog watchdog = new ExecuteWatchdog(300000);//timeout 5min
executor.setExitValues(exitvalues); // accept exit codes 0 and 1, since git checkout may return 1 without a real failure
executor.setWorkingDirectory(new File(classPath));// run the command inside the repository
executor.setWatchdog(watchdog);
executor.execute(cmdLine);// synchronous execution (no result handler)
Thread.sleep(6000);
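// keep only the C/C++ source file diffs of this change pair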
List<DiffEntry> diffs = cp.getDiffs();
ArrayList<DiffEntry> filterDiffs = getUsefulDiffs(diffs);
System.out.println("Diffsize:"+filterDiffs.size());
if(filterDiffs.size()==0) {
return n;// continue the next iter
}
String diffDir = diskpath+"cp"+String.valueOf(n)+"\\diff_logs\\";
File diffDirFile = new File(diffDir);
if (!diffDirFile.exists()) {
diffDirFile.mkdirs();
}
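// write each filtered diff as its own unified-diff log file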
int count = 0;
for (DiffEntry entry : filterDiffs) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
DiffFormatter df = new DiffFormatter(out);
df.setDiffComparator(RawTextComparator.WS_IGNORE_ALL);
df.setRepository(repo);
String path = diffDir+"diff"+String.valueOf(count)+".txt";
BufferedWriter wr = new BufferedWriter(new FileWriter(new File(path)));
df.format(entry);
String diffText = out.toString("UTF-8");
// System.out.println(diffText);
wr.append(diffText);
wr.close();
df.close();
count++;
}
String diffPath = diskpath+"cp"+String.valueOf(n)+"\\diffs.txt";
File diffFile = new File(diffPath);
if (!diffFile.getParentFile().exists()) {
diffFile.getParentFile().mkdirs();
}
String tagPath = diskpath+"cp"+String.valueOf(n)+"\\tags.txt";
BufferedWriter wr = new BufferedWriter(new FileWriter(diffFile));
BufferedWriter wr1 = new BufferedWriter(new FileWriter(tagPath));
wr.append(cp.getRepoName());
wr.newLine();
wr.append(oldCommitName+";"+newCommitName);
wr.newLine();
wr.flush();// header lines: repo name and the "oldCommit;newCommit" hash pair
wr1.append("newCommit:\n"+newCommit.getFullMessage());
wr1.newLine();
wr1.append("oldCommit:\n"+oldCommit.getFullMessage());
wr1.close();
for (DiffEntry entry : filterDiffs) {
wr.append(entry.getOldPath()+";"+entry.getNewPath());
wr.newLine();
wr.flush();
String newFilePath = classPath+entry.getNewPath();
String copyPath = movePath+entry.getNewPath();
FileOperation.copyFile(new File(newFilePath), new File(copyPath));//copy changeFile
}
wr.close();
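// check out the parent commit and copy the pre-patch versions of the changed files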
// Thread.sleep(5000);
String movePath1 = diskpath+"cp"+String.valueOf(n)+"\\"+oldCommitName+"\\";
String line1 = "cmd.exe /C git checkout -f "+oldCommitName;
CommandLine cmdLine1 = CommandLine.parse(line1);
DefaultExecuteResultHandler resultHandler1 = new DefaultExecuteResultHandler();
DefaultExecutor executor1 = new DefaultExecutor();
ExecuteWatchdog watchdog1 = new ExecuteWatchdog(300000);//timeout 5min
executor1.setExitValues(exitvalues); // accept exit codes 0 and 1, since git checkout may return 1 without a real failure
executor1.setWorkingDirectory(new File(classPath));// run the command inside the repository
executor1.setWatchdog(watchdog1);
executor1.execute(cmdLine1, resultHandler1);
Thread.sleep(6000);
for (DiffEntry entry : filterDiffs) {
String oldFilePath = classPath+entry.getOldPath();
String copyPath = movePath1+entry.getOldPath();
FileOperation.copyFile(new File(oldFilePath), new File(copyPath));//copy changeFile
}
resultHandler1.waitFor();
// Thread.sleep(5000);
return n;
}//Execute checkout and copy diffs
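// keep diffs whose old and new paths are both C/C++ sources; pure additions/deletions (/dev/null paths) are dropped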
public static ArrayList<DiffEntry> getUsefulDiffs(List<DiffEntry> diffs){
ArrayList<DiffEntry> filterDiffs = new ArrayList<DiffEntry>();
for (DiffEntry entry : diffs) {
String oldFilePath = entry.getOldPath();
System.out.println("old:"+oldFilePath);
String newFilePath = entry.getNewPath();
System.out.println("new:"+newFilePath);
System.out.println("---------");
if(oldFilePath.contains("/dev/null")||newFilePath.contains("/dev/null")) {
continue;// added or deleted files show /dev/null as one path and are not used as change pairs
// }else if(oldFilePath.contains(".java")&&newFilePath.contains(".java")){
// filterDiffs.add(entry);
}
else if((oldFilePath.contains(".cpp")||oldFilePath.contains(".CPP")||
oldFilePath.contains(".cc")||oldFilePath.contains(".h")||oldFilePath.contains(".c"))
&&(newFilePath.contains(".cpp")||newFilePath.contains(".CPP")||
newFilePath.contains(".cc")||newFilePath.contains(".h")||newFilePath.contains(".c"))){
filterDiffs.add(entry);
}
}// deleted files are skipped as well, since they no longer exist in the checked-out commit
return filterDiffs;
}
public static Integer continueProcess(String rootPath) {
File rootFile = new File(rootPath);
File[] dirs = rootFile.listFiles();
int n = 0;
for(File dir : dirs) {
String cpName = dir.getName();
int cpNum = Integer.valueOf(cpName.substring(2, cpName.length()));
if(cpNum>=n) {
n = cpNum+1;
}
}
return n;
}//get cpNumber and continue from the next number
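// build a ChangePair (parent commit, commit, file-level diffs) by diffing the commit tree against its parent's tree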
public static ChangePair getChangPair(RevCommit revCommit, Repository repo) throws Exception {
List<DiffEntry> returnDiffs = null;
RevCommit previousCommit=getPrevHash(revCommit, repo);
System.out.println("PrevCommit:"+previousCommit.getName());
try {
ObjectId head=revCommit.getTree().getId();
ObjectId oldHead=previousCommit.getTree().getId();
System.out.println("Printing diff between the Revisions: " + revCommit.getName() + " and " + previousCommit.getName());
// prepare the two iterators to compute the diff between
try (ObjectReader reader = repo.newObjectReader()) {
CanonicalTreeParser oldTreeIter = new CanonicalTreeParser();
oldTreeIter.reset(reader, oldHead);
CanonicalTreeParser newTreeIter = new CanonicalTreeParser();
newTreeIter.reset(reader, head);
// finally get the list of changed files
try (Git git = new Git(repo)) {
List<DiffEntry> diffs= git.diff()
.setNewTree(newTreeIter)
.setOldTree(oldTreeIter)
.call();
returnDiffs=diffs;
} catch (GitAPIException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ChangePair cp = new ChangePair(revCommit, previousCommit, returnDiffs);
return cp;
}
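// resolve the first parent of the commit (assumes the commit is not a root commit)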
public static RevCommit getPrevHash(RevCommit commit, Repository repo) throws IOException {
RevWalk revWalk = new RevWalk(repo);
RevCommit previous = revWalk.parseCommit(commit.getParent(0).getId());
//Reached end and no previous commits.
revWalk.close();
return previous;
}
static void printTime(int commitTime) {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String timestampString=String.valueOf(commitTime);
Long timestamp = Long.parseLong(timestampString) * 1000;
String date = formatter.format(new Date(timestamp));
System.out.println(date);
}
}
| 11,497 | 35.971061 | 122 |
java
|
SeqTrans
|
SeqTrans-master/Migration/src/collect/CollectApiDiffs.java
|
package collect;
import structure.API;
import utils.ReadAPI;
import java.io.*;
import java.util.ArrayList;
import java.util.LinkedHashSet;
public class CollectApiDiffs {
private static LinkedHashSet<API> apis = new LinkedHashSet<API>();
public static void main(String args[]) throws Exception{
String path = "apis";
apis = ReadAPI.readAPI(path);
System.out.println(apis.size());
ArrayList<String> realDiffs = new ArrayList<String>();
String diffPath = "data\\";
File diffDir = new File(diffPath);
File[] diffs = diffDir.listFiles();
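// scan every "def" diff file and keep the lines that mention a known API method name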
for(int i=0;i<diffs.length;i++) {
File diffFile = diffs[i];
System.out.println("processing:"+diffFile.getName());
if(diffFile.getName().contains("def")) {
BufferedReader br = new BufferedReader(new FileReader(diffFile));
String tmpline = "";
while((tmpline=br.readLine())!=null) {
// System.out.println(tmpline);
if(containsAPI(tmpline, tmpline)) {
realDiffs.add(tmpline);
}
}
br.close();
}
}
File outFile = new File("realDiff.txt");
BufferedWriter wr = new BufferedWriter(new FileWriter(outFile));
for(String line : realDiffs) {
wr.append(line);
wr.newLine();
wr.flush();
}
wr.close();
}
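// true if either side of the change line contains one of the collected API method names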
public static Boolean containsAPI(String src, String dst) {
for(API api : apis) {
String cName = api.getClassName();
String mName = api.getMethodName();
if(src.contains(mName)) {
// if(src.contains(cName)) {
// System.out.println("find public static api");
// }
// else System.out.println("find api");
return true;
}
if(dst.contains(mName)) {
// if(dst.contains(cName)) {
// System.out.println("find public static api");
// }
// else System.out.println("find api");
return true;
}
}
return false;
}
}
| 1,787 | 24.913043 | 69 |
java
|
SeqTrans
|
SeqTrans-master/Migration/src/collect/CollectCommitDiff.java
|
package collect;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteWatchdog;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.diff.RawTextComparator;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import structure.ChangePair;
import utils.FileOperation;
public class CollectCommitDiff {
/**
* Collecting API change pairs from git commit diff logs
* @throws Exception
*/
public static void main(String[] args) throws Exception {
// TODO Auto-generated method stub
String dataPath="D:\\workspace\\Pycharm\\20191222-Vulnerability-dataset\\dataset.csv";//��Ҫ������Commit Hash
String rootPath="J:\\git_repo\\";//��Ӧ��Ŀ�ڱ���Repo��·��
autoExtraction(dataPath, rootPath);
// getChangeList(versionCommit, path);
}
public static void autoExtraction(String dataPath, String rootPath) throws Exception {
File csvFile = new File(dataPath);
BufferedReader br = new BufferedReader(new FileReader(csvFile));
String tmpline = "";
ArrayList<String> lines = new ArrayList<String>();
while((tmpline=br.readLine())!=null) {
lines.add(tmpline);
}
br.close();
int n=0;
for(int i=0;i<lines.size();i++) {
System.out.println("round:"+i);
String line = lines.get(i);
String[] tokens = line.split(",");
String CVE = tokens[0];
String URL = tokens[1];
String commit = tokens[2];
String repoName = URL.split("/")[URL.split("/").length-1];
System.out.println(repoName+","+commit);
String classPath = rootPath+repoName;
File repoDir = new File(classPath);
if (!repoDir.exists()) {
System.err.println(repoName+" not exists!");
continue;
}
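// locate the local clone with JGit's FileRepositoryBuilder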
FileRepositoryBuilder builder = new FileRepositoryBuilder();
builder.setMustExist(true);
builder.addCeilingDirectory(new File(classPath));
builder.findGitDir(new File(classPath));
Repository repo;
repo = builder.build();
RevWalk walk = new RevWalk(repo);
ObjectId versionId=repo.resolve(commit);
ChangePair cp = new ChangePair();
try {
RevCommit currentCommit=walk.parseCommit(versionId); // may throw if the commit cannot be resolved in this repo
cp = getChangPair(currentCommit, repo);
cp.setRootPath(rootPath);
cp.setRepoName(repoName);
} catch (Exception e) {
e.printStackTrace();
continue;
}
RevCommit newCommit = cp.getNewCommit();
RevCommit oldCommit = cp.getOldCommit();
String newCommitName = newCommit.getName();
String oldCommitName = oldCommit.getName();
System.out.println("cp"+n+":"+oldCommitName+";"+newCommitName);
n++;
// n = runExec(CVE, cp, repo, n);
walk.close();
}
System.out.println("CPsize:"+n);
}
private static int runExec(String CVE, ChangePair cp, Repository repo, int n) throws Exception {
String diskpath = "J:\\Vulnerability_commit\\";
RevCommit newCommit = cp.getNewCommit();
RevCommit oldCommit = cp.getOldCommit();
String newCommitName = newCommit.getName();
String oldCommitName = oldCommit.getName();
String rootPath = cp.getRootPath();
String repoName = cp.getRepoName();
String classPath = rootPath+repoName+"\\";
String movePath = diskpath+"cp"+String.valueOf(n)+"\\"+newCommitName+"\\";
String line = "cmd.exe /C git checkout "+newCommitName;
int[] exitvalues = {0, 1};
System.out.println(line);
CommandLine cmdLine = CommandLine.parse(line);
DefaultExecutor executor = new DefaultExecutor();
ExecuteWatchdog watchdog = new ExecuteWatchdog(10000);//timeout 10s
executor.setExitValues(exitvalues); // accept exit codes 0 and 1, since git checkout may return 1 without a real failure
executor.setWorkingDirectory(new File(classPath));// run the command inside the repository
executor.setWatchdog(watchdog);
executor.execute(cmdLine);// synchronous execution (no result handler)
// Thread.sleep(1000);
List<DiffEntry> diffs = cp.getDiffs();
ArrayList<DiffEntry> filterDiffs = getUsefulDiffs(diffs);
System.out.println("Diffsize:"+filterDiffs.size());
if(filterDiffs.size()==0) {
return n;// continue the next iter
}
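// write each filtered diff as its own unified-diff log file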
String diffDir = diskpath+"cp"+String.valueOf(n)+"\\diff_logs\\";
File diffDirFile = new File(diffDir);
if (!diffDirFile.exists()) {
diffDirFile.mkdirs();
}
int count = 0;
for (DiffEntry entry : filterDiffs) {
ByteArrayOutputStream out = new ByteArrayOutputStream();
DiffFormatter df = new DiffFormatter(out);
df.setDiffComparator(RawTextComparator.WS_IGNORE_ALL);
df.setRepository(repo);
String path = diffDir+"diff"+String.valueOf(count)+".txt";
BufferedWriter wr = new BufferedWriter(new FileWriter(new File(path)));
df.format(entry);
String diffText = out.toString("UTF-8");
// System.out.println(diffText);
wr.append(diffText);
wr.close();
df.close();
count++;
}
String diffPath = diskpath+"cp"+String.valueOf(n)+"\\diffs.txt";
File diffFile = new File(diffPath);
if (!diffFile.getParentFile().exists()) {
diffFile.getParentFile().mkdirs();
}
String tagPath = diskpath+"cp"+String.valueOf(n)+"\\tags.txt";
BufferedWriter wr = new BufferedWriter(new FileWriter(diffFile));
BufferedWriter wr1 = new BufferedWriter(new FileWriter(tagPath));
wr.append(cp.getRepoName());
wr.newLine();
wr.append(oldCommitName+";"+newCommitName);
wr.newLine();
wr.flush();// header lines: repo name and the "oldCommit;newCommit" hash pair
wr1.append("newCommit:\n"+newCommit.getFullMessage());
wr1.newLine();
wr1.append("oldCommit:\n"+oldCommit.getFullMessage());
wr1.close();
for (DiffEntry entry : filterDiffs) {
wr.append(entry.getOldPath()+";"+entry.getNewPath());
wr.newLine();
wr.flush();
String newFilePath = classPath+entry.getNewPath();
String copyPath = movePath+entry.getNewPath();
FileOperation.copyFile(new File(newFilePath), new File(copyPath));//copy changeFile
}
wr.close();
// Thread.sleep(5000);
String movePath1 = diskpath+"cp"+String.valueOf(n)+"\\"+oldCommitName+"\\";
String line1 = "cmd.exe /C git checkout "+oldCommitName;
CommandLine cmdLine1 = CommandLine.parse(line1);
DefaultExecuteResultHandler resultHandler1 = new DefaultExecuteResultHandler();
DefaultExecutor executor1 = new DefaultExecutor();
ExecuteWatchdog watchdog1 = new ExecuteWatchdog(10000);//timeout 10s
executor1.setExitValues(exitvalues); // accept exit codes 0 and 1, since git checkout may return 1 without a real failure
executor1.setWorkingDirectory(new File(classPath));// run the command inside the repository
executor1.setWatchdog(watchdog1);
executor1.execute(cmdLine1, resultHandler1);
Thread.sleep(1000);
for (DiffEntry entry : filterDiffs) {
String oldFilePath = classPath+entry.getOldPath();
String copyPath = movePath1+entry.getOldPath();
FileOperation.copyFile(new File(oldFilePath), new File(copyPath));//copy changeFile
}
resultHandler1.waitFor();
// Thread.sleep(5000);
n++;
return n;
}//Execute checkout and copy diffs
public static ArrayList<DiffEntry> getUsefulDiffs(List<DiffEntry> diffs){
ArrayList<DiffEntry> filterDiffs = new ArrayList<DiffEntry>();
for (DiffEntry entry : diffs) {
String oldFilePath = entry.getOldPath();
String newFilePath = entry.getNewPath();
if(oldFilePath.contains("/dev/null")||newFilePath.contains("/dev/null")) {
continue;// added or deleted files show /dev/null as one path and are not used as change pairs
}else if(oldFilePath.contains(".java")&&newFilePath.contains(".java")){
filterDiffs.add(entry);
}// deleted files are skipped as well, since they no longer exist in the checked-out commit
}
return filterDiffs;
}
public static ChangePair getChangPair(RevCommit revCommit, Repository repo) throws Exception {
List<DiffEntry> returnDiffs = null;
RevCommit previousCommit=getPrevHash(revCommit,repo);
try {
if(previousCommit==null)
return null;
ObjectId head=revCommit.getTree().getId();
ObjectId oldHead=previousCommit.getTree().getId();
System.out.println("Printing diff between the Revisions: " + revCommit.getName() + " and " + previousCommit.getName());
// prepare the two iterators to compute the diff between
try (ObjectReader reader = repo.newObjectReader()) {
CanonicalTreeParser oldTreeIter = new CanonicalTreeParser();
oldTreeIter.reset(reader, oldHead);
CanonicalTreeParser newTreeIter = new CanonicalTreeParser();
newTreeIter.reset(reader, head);
// finally get the list of changed files
try (Git git = new Git(repo)) {
List<DiffEntry> diffs= git.diff()
.setNewTree(newTreeIter)
.setOldTree(oldTreeIter)
.call();
List<DiffEntry> modifiedDiffs = new ArrayList<>();
for (DiffEntry diff: diffs){
DiffEntry.ChangeType changeType = diff.getChangeType();
// keep the diff only if it is neither a file addition nor a copy
if (changeType != DiffEntry.ChangeType.ADD && changeType != DiffEntry.ChangeType.COPY)
modifiedDiffs.add(diff);
else
continue;
}
returnDiffs = modifiedDiffs;
} catch (GitAPIException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ChangePair cp = new ChangePair(revCommit, previousCommit, returnDiffs);
return cp;
}
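// walk back from the given commit; the second commit visited (normally its first parent) is returned, or null if there is none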
public static RevCommit getPrevHash(RevCommit commit, Repository repo) throws IOException {
RevCommit previous = null;
try (RevWalk walk = new RevWalk(repo)) {
// Starting point
walk.markStart(commit);
int count = 0;
for (RevCommit rev : walk) {
// got the previous commit.
if (count == 1) {
previous = rev;
}
count++;
}
}
//Reached end and no previous commits.
return previous;
}
static void printTime(int commitTime) {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String timestampString=String.valueOf(commitTime);
Long timestamp = Long.parseLong(timestampString) * 1000;
String date = formatter.format(new Date(timestamp));
System.out.println(date);
}
}
| 11,142 | 36.267559 | 122 |
java
|