lang (stringclasses 2) | license (stringclasses 13) | stderr (stringlengths 0-343) | commit (stringlengths 40-40) | returncode (int64 0-128) | repos (stringlengths 6-87.7k) | new_contents (stringlengths 0-6.23M) | new_file (stringlengths 3-311) | old_contents (stringlengths 0-6.23M) | message (stringlengths 6-9.1k) | old_file (stringlengths 3-311) | subject (stringlengths 0-4k) | git_diff (stringlengths 0-6.31M) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | b2439b1711db4d6b901d4636e2a23d3a5f4168a8 | 0 | rcordovano/autopsy,esaunders/autopsy,rcordovano/autopsy,esaunders/autopsy,rcordovano/autopsy,rcordovano/autopsy,rcordovano/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,esaunders/autopsy,esaunders/autopsy,rcordovano/autopsy,wschaeferB/autopsy,esaunders/autopsy,wschaeferB/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.contentviewers;
import java.awt.Component;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
/**
* Shows file metadata as a list to make it easy to copy and paste. Typically
* shows the same data that can also be found in the ResultViewer table, just a
* different order and allows the full path to be visible in the bottom area.
*/
@ServiceProvider(service = DataContentViewer.class, position = 6)
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
public class Metadata extends javax.swing.JPanel implements DataContentViewer {
/**
* Creates new form Metadata
*/
public Metadata() {
initComponents();
customizeComponents();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPopupMenu1 = new javax.swing.JPopupMenu();
jScrollPane2 = new javax.swing.JScrollPane();
jTextPane1 = new javax.swing.JTextPane();
setPreferredSize(new java.awt.Dimension(100, 52));
jScrollPane2.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_ALWAYS);
jScrollPane2.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
jScrollPane2.setPreferredSize(new java.awt.Dimension(610, 52));
jTextPane1.setEditable(false);
jTextPane1.setPreferredSize(new java.awt.Dimension(600, 52));
jScrollPane2.setViewportView(jTextPane1);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JPopupMenu jPopupMenu1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTextPane jTextPane1;
// End of variables declaration//GEN-END:variables
private void customizeComponents() {
/*
* jTextPane1.setComponentPopupMenu(rightClickMenu); ActionListener
* actList = new ActionListener(){ @Override public void
* actionPerformed(ActionEvent e){ JMenuItem jmi = (JMenuItem)
* e.getSource(); if(jmi.equals(copyMenuItem)) outputViewPane.copy();
* else if(jmi.equals(selectAllMenuItem)) outputViewPane.selectAll(); }
* }; copyMenuItem.addActionListener(actList);
* selectAllMenuItem.addActionListener(actList);
*/
Utilities.configureTextPaneAsHtml(jTextPane1);
}
private void setText(String str) {
jTextPane1.setText("<html><body>" + str + "</body></html>"); //NON-NLS
}
private void startTable(StringBuilder sb) {
sb.append("<table>"); //NON-NLS
}
private void endTable(StringBuilder sb) {
sb.append("</table>"); //NON-NLS
}
private void addRow(StringBuilder sb, String key, String value) {
sb.append("<tr><td valign=\"top\">"); //NON-NLS
sb.append(key);
sb.append("</td><td>"); //NON-NLS
sb.append(value);
sb.append("</td></tr>"); //NON-NLS
}
@Messages({
"Metadata.tableRowTitle.mimeType=MIME Type",
"Metadata.nodeText.truncated=(results truncated)",
"Metadata.tableRowTitle.sha1=SHA1",
"Metadata.tableRowTitle.sha256=SHA256",
"Metadata.tableRowTitle.imageType=Type",
"Metadata.tableRowTitle.sectorSize=Sector Size",
"Metadata.tableRowTitle.timezone=Time Zone",
"Metadata.tableRowTitle.deviceId=Device ID"})
@Override
public void setNode(Node node) {
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
Image image = node.getLookup().lookup((Image.class));
if (file == null && image == null) {
setText(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.nonFilePassedIn"));
return;
}
StringBuilder sb = new StringBuilder();
startTable(sb);
if (file != null) {
try {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getUniquePath());
} catch (TskCoreException ex) {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getParentPath() + "/" + file.getName());
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.type"), file.getType().getName());
addRow(sb, Bundle.Metadata_tableRowTitle_mimeType(), file.getMIMEType());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(file.getSize()));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.fileNameAlloc"), file.getDirFlagAsString());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.metadataAlloc"), file.getMetaFlagsAsString());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.modified"), ContentUtils.getStringTime(file.getMtime(), file));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.accessed"), ContentUtils.getStringTime(file.getAtime(), file));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.created"), ContentUtils.getStringTime(file.getCrtime(), file));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.changed"), ContentUtils.getStringTime(file.getCtime(), file));
String md5 = file.getMd5Hash();
if (md5 == null) {
md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.hashLookupResults"), file.getKnown().toString());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(file.getId()));
if (file.getType().compareTo(TSK_DB_FILES_TYPE_ENUM.LOCAL) == 0) {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), file.getLocalAbsPath());
}
endTable(sb);
/*
* If we have a file system file, grab the more detailed metadata text
* too
*/
try {
if (file instanceof FsContent) {
FsContent fsFile = (FsContent) file;
sb.append("<hr /><pre>\n"); //NON-NLS
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.text"));
sb.append(" <br /><br />"); // NON-NLS
for (String str : fsFile.getMetaDataText()) {
sb.append(str).append("<br />"); //NON-NLS
/*
* Very long results can cause the UI to hang before displaying,
* so truncate the results if necessary.
*/
if(sb.length() > 50000){
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.truncated"));
break;
}
}
sb.append("</pre>\n"); //NON-NLS
}
} catch (TskCoreException ex) {
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
}
} else {
try {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), image.getUniquePath());
} catch (TskCoreException ex) {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), image.getName());
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.imageType"), image.getType().getName());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(image.getSize()));
try {
String md5 = image.getMd5();
if (md5 == null || md5.isEmpty()) {
md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
String sha1 = image.getSha1();
if (sha1 == null || sha1.isEmpty()) {
sha1 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sha1"), sha1);
String sha256 = image.getSha256();
if (sha256 == null || sha256.isEmpty()) {
sha256 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sha256"), sha256);
} catch (TskCoreException ex) {
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sectorSize"), Long.toString(image.getSsize()));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.timezone"), image.getTimeZone());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.deviceId"), image.getDeviceId());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(image.getId()));
// Add all the data source paths to the "Local Path" value cell.
String[] imagePaths = image.getPaths();
StringBuilder pathValues = new StringBuilder("<div>");
pathValues.append(imagePaths[0]);
pathValues.append("</div>");
for (int i=1; i < imagePaths.length; i++) {
pathValues.append("<div>");
pathValues.append(imagePaths[i]);
pathValues.append("</div>");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), pathValues.toString());
}
setText(sb.toString());
jTextPane1.setCaretPosition(0);
this.setCursor(null);
}
@Override
public String getTitle() {
return NbBundle.getMessage(this.getClass(), "Metadata.title");
}
@Override
public String getToolTip() {
return NbBundle.getMessage(this.getClass(), "Metadata.toolTip");
}
@Override
public DataContentViewer createInstance() {
return new Metadata();
}
@Override
public Component getComponent() {
return this;
}
@Override
public void resetComponent() {
setText("");
}
@Override
public boolean isSupported(Node node) {
Image image = node.getLookup().lookup(Image.class);
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
return (file != null) || (image != null);
}
@Override
public int isPreferred(Node node) {
return 1;
}
}
| Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java | /*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.contentviewers;
import java.awt.Component;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
/**
* Shows file metadata as a list to make it easy to copy and paste. Typically
* shows the same data that can also be found in the ResultViewer table, just a
* different order and allows the full path to be visible in the bottom area.
*/
@ServiceProvider(service = DataContentViewer.class, position = 6)
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
public class Metadata extends javax.swing.JPanel implements DataContentViewer {
/**
* Creates new form Metadata
*/
public Metadata() {
initComponents();
customizeComponents();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPopupMenu1 = new javax.swing.JPopupMenu();
jScrollPane2 = new javax.swing.JScrollPane();
jTextPane1 = new javax.swing.JTextPane();
setPreferredSize(new java.awt.Dimension(100, 52));
jScrollPane2.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_ALWAYS);
jScrollPane2.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
jScrollPane2.setPreferredSize(new java.awt.Dimension(610, 52));
jTextPane1.setEditable(false);
jTextPane1.setPreferredSize(new java.awt.Dimension(600, 52));
jScrollPane2.setViewportView(jTextPane1);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JPopupMenu jPopupMenu1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JTextPane jTextPane1;
// End of variables declaration//GEN-END:variables
private void customizeComponents() {
/*
* jTextPane1.setComponentPopupMenu(rightClickMenu); ActionListener
* actList = new ActionListener(){ @Override public void
* actionPerformed(ActionEvent e){ JMenuItem jmi = (JMenuItem)
* e.getSource(); if(jmi.equals(copyMenuItem)) outputViewPane.copy();
* else if(jmi.equals(selectAllMenuItem)) outputViewPane.selectAll(); }
* }; copyMenuItem.addActionListener(actList);
* selectAllMenuItem.addActionListener(actList);
*/
Utilities.configureTextPaneAsHtml(jTextPane1);
}
private void setText(String str) {
jTextPane1.setText("<html><body>" + str + "</body></html>"); //NON-NLS
}
private void startTable(StringBuilder sb) {
sb.append("<table>"); //NON-NLS
}
private void endTable(StringBuilder sb) {
sb.append("</table>"); //NON-NLS
}
private void addRow(StringBuilder sb, String key, String value) {
sb.append("<tr><td valign=\"top\">"); //NON-NLS
sb.append(key);
sb.append("</td><td>"); //NON-NLS
sb.append(value);
sb.append("</td></tr>"); //NON-NLS
}
@Messages({
"Metadata.tableRowTitle.mimeType=MIME Type",
"Metadata.nodeText.truncated=(results truncated)",
"Metadata.tableRowTitle.sha1=SHA1",
"Metadata.tableRowTitle.sha256=SHA256",
"Metadata.tableRowTitle.imageType=Type",
"Metadata.tableRowTitle.sectorSize=Sector Size",
"Metadata.tableRowTitle.timezone=Time Zone",
"Metadata.tableRowTitle.deviceId=Device ID"})
@Override
public void setNode(Node node) {
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
Image image = node.getLookup().lookup((Image.class));
if (file == null && image == null) {
setText(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.nonFilePassedIn"));
return;
}
StringBuilder sb = new StringBuilder();
startTable(sb);
if (file != null) {
try {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getUniquePath());
} catch (TskCoreException ex) {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getParentPath() + "/" + file.getName());
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.type"), file.getType().getName());
addRow(sb, Bundle.Metadata_tableRowTitle_mimeType(), file.getMIMEType());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(file.getSize()));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.fileNameAlloc"), file.getDirFlagAsString());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.metadataAlloc"), file.getMetaFlagsAsString());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.modified"), ContentUtils.getStringTime(file.getMtime(), file));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.accessed"), ContentUtils.getStringTime(file.getAtime(), file));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.created"), ContentUtils.getStringTime(file.getCrtime(), file));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.changed"), ContentUtils.getStringTime(file.getCtime(), file));
String md5 = file.getMd5Hash();
if (md5 == null) {
md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.hashLookupResults"), file.getKnown().toString());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(file.getId()));
if (file.getType().compareTo(TSK_DB_FILES_TYPE_ENUM.LOCAL) == 0) {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), file.getLocalAbsPath());
}
endTable(sb);
/*
* If we have a file system file, grab the more detailed metadata text
* too
*/
try {
if (file instanceof FsContent) {
FsContent fsFile = (FsContent) file;
sb.append("<hr /><pre>\n"); //NON-NLS
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.text"));
sb.append(" <br /><br />"); // NON-NLS
for (String str : fsFile.getMetaDataText()) {
sb.append(str).append("<br />"); //NON-NLS
/*
* Very long results can cause the UI to hang before displaying,
* so truncate the results if necessary.
*/
if(sb.length() > 50000){
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.truncated"));
break;
}
}
sb.append("</pre>\n"); //NON-NLS
}
} catch (TskCoreException ex) {
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
}
} else {
try {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), image.getUniquePath());
} catch (TskCoreException ex) {
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), image.getName());
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.imageType"), image.getType().getName());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(image.getSize()));
try {
String md5 = image.getMd5();
if (md5 == null || md5.isEmpty()) {
md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
String sha1 = image.getSha1();
if (sha1 == null || sha1.isEmpty()) {
sha1 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sha1"), sha1);
String sha256 = image.getSha256();
if (sha256 == null || sha256.isEmpty()) {
sha256 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sha256"), sha256);
} catch (TskCoreException ex) {
sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sectorSize"), Long.toString(image.getSsize()));
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.timezone"), image.getTimeZone());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.deviceId"), image.getDeviceId());
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(image.getId()));
// Add all the data source paths to the "Name" value cell.
String[] imagePaths = image.getPaths();
StringBuilder pathValues = new StringBuilder("<div>");
pathValues.append(imagePaths[0]);
pathValues.append("</div>");
for (int i=1; i < imagePaths.length; i++) {
pathValues.append("<div>");
pathValues.append(imagePaths[i]);
pathValues.append("</div>");
}
addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), pathValues.toString());
}
setText(sb.toString());
jTextPane1.setCaretPosition(0);
this.setCursor(null);
}
@Override
public String getTitle() {
return NbBundle.getMessage(this.getClass(), "Metadata.title");
}
@Override
public String getToolTip() {
return NbBundle.getMessage(this.getClass(), "Metadata.toolTip");
}
@Override
public DataContentViewer createInstance() {
return new Metadata();
}
@Override
public Component getComponent() {
return this;
}
@Override
public void resetComponent() {
setText("");
}
@Override
public boolean isSupported(Node node) {
Image image = node.getLookup().lookup(Image.class);
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
return (file != null) || (image != null);
}
@Override
public int isPreferred(Node node) {
return 1;
}
}
| Corrected typo.
| Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java | Corrected typo. | <ide><path>ore/src/org/sleuthkit/autopsy/contentviewers/Metadata.java
<ide> addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.deviceId"), image.getDeviceId());
<ide> addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(image.getId()));
<ide>
<del> // Add all the data source paths to the "Name" value cell.
<add> // Add all the data source paths to the "Local Path" value cell.
<ide> String[] imagePaths = image.getPaths();
<ide> StringBuilder pathValues = new StringBuilder("<div>");
<ide> pathValues.append(imagePaths[0]); |
|
JavaScript | apache-2.0 | c51473be278fcb0cb623898bc3727e351efd0bb3 | 0 | beni55/enyo,zefsolutions/enyo,onecrayon/enyo,mcanthony/enyo,bright-sparks/enyo,bright-sparks/enyo,enyojs/enyo,wikieswan/enyo,enyojs/enyo,beni55/enyo,kustomzone/enyo,soapdog/enyo,PKRoma/enyo,onecrayon/enyo,soapdog/enyo,airspringsoftware/enyo,PKRoma/enyo,wikieswan/enyo,kustomzone/enyo,airspringsoftware/enyo,mcanthony/enyo,zefsolutions/enyo | //* @protected
enyo.requiresWindow(function() {
// add touch-specific gesture feature
var gesture = enyo.gesture;
var oldevents = gesture.events;
//
gesture.events.touchstart = function(e) {
// for duration of this touch, only handle touch events. Old event
// structure will be restored during touchend.
gesture.events = touchGesture;
gesture.events.touchstart(e);
};
//
var touchGesture = {
_touchCount: 0,
touchstart: function(inEvent) {
enyo.job.stop("resetGestureEvents");
this._touchCount += inEvent.changedTouches.length;
this.excludedTarget = null;
var e = this.makeEvent(inEvent);
gesture.down(e);
// generate a new event object since over is a different event
e = this.makeEvent(inEvent);
this.overEvent = e;
gesture.over(e);
},
touchmove: function(inEvent) {
enyo.job.stop("resetGestureEvents");
// NOTE: allow user to supply a node to exclude from event
// target finding via the drag event.
var de = gesture.drag.dragEvent;
this.excludedTarget = de && de.dragInfo && de.dragInfo.node;
var e = this.makeEvent(inEvent);
gesture.move(e);
// prevent default document scrolling if enyo.bodyIsFitting == true
// avoid window scrolling by preventing default on this event
// note: this event can be made unpreventable (native scrollers do this)
if (enyo.bodyIsFitting) {
inEvent.preventDefault();
}
// synthesize over and out (normally generated via mouseout)
if (this.overEvent && this.overEvent.target != e.target) {
this.overEvent.relatedTarget = e.target;
e.relatedTarget = this.overEvent.target;
gesture.out(this.overEvent);
gesture.over(e);
}
this.overEvent = e;
},
touchend: function(inEvent) {
gesture.up(this.makeEvent(inEvent));
// NOTE: in touch land, there is no distinction between
// a pointer enter/leave and a drag over/out.
// While it may make sense to send a leave event when a touch
// ends, it does not make sense to send a dragout.
// We avoid this by processing out after up, but
// this ordering is ad hoc.
gesture.out(this.overEvent);
// reset the event handlers back to the mouse-friendly ones after
// a short timeout. We can't do this directly in this handler
// because it messes up Android to handle the mouseup event.
// FIXME: for 2.1 release, conditional on platform being
// desktop Chrome, since we're seeing issues in PhoneGap with this
// code.
this._touchCount -= inEvent.changedTouches.length;
if (enyo.platform.chrome && this._touchCount === 0) {
enyo.job("resetGestureEvents", function() {
gesture.events = oldevents;
}, 10);
}
},
makeEvent: function(inEvent) {
var e = enyo.clone(inEvent.changedTouches[0]);
e.srcEvent = inEvent;
e.target = this.findTarget(e.clientX, e.clientY);
// normalize "mouse button" info
e.which = 1;
//console.log("target for " + inEvent.type + " at " + e.pageX + ", " + e.pageY + " is " + (e.target ? e.target.id : "none"));
return e;
},
calcNodeOffset: function(inNode) {
if (inNode.getBoundingClientRect) {
var o = inNode.getBoundingClientRect();
return {
left: o.left,
top: o.top,
width: o.width,
height: o.height
};
}
},
findTarget: function(inX, inY) {
return document.elementFromPoint(inX, inY);
},
// NOTE: will find only 1 element under the touch and
// will fail if an element is positioned outside the bounding box of its parent
findTargetTraverse: function(inNode, inX, inY) {
var n = inNode || document.body;
var o = this.calcNodeOffset(n);
if (o && n != this.excludedTarget) {
var x = inX - o.left;
var y = inY - o.top;
//console.log("test: " + n.id + " (left: " + o.left + ", top: " + o.top + ", width: " + o.width + ", height: " + o.height + ")");
if (x>0 && y>0 && x<=o.width && y<=o.height) {
//console.log("IN: " + n.id + " -> [" + x + "," + y + " in " + o.width + "x" + o.height + "] (children: " + n.childNodes.length + ")");
var target;
for (var n$=n.childNodes, i=n$.length-1, c; (c=n$[i]); i--) {
target = this.findTargetTraverse(c, inX, inY);
if (target) {
return target;
}
}
return n;
}
}
},
connect: function() {
enyo.forEach(['ontouchstart', 'ontouchmove', 'ontouchend', 'ongesturestart', 'ongesturechange', 'ongestureend'], function(e) {
document[e] = enyo.dispatch;
});
// use proper target finding technique based on feature detection.
if (enyo.platform.androidChrome <= 18) {
// HACK: on Chrome for Android v18 on devices with higher density displays,
// document.elementFromPoint uses wrong pixel system, so manually scale
var dpr = window.devicePixelRatio;
this.findTarget = function(inX, inY) {
return document.elementFromPoint(inX * dpr, inY * dpr);
};
} else if (!document.elementFromPoint) {
this.findTarget = function(inX, inY) {
return this.findTargetTraverse(null, inX, inY);
};
}
}
};
//
touchGesture.connect();
}); | source/touch/touch.js | //* @protected
enyo.requiresWindow(function() {
// add touch-specific gesture feature
var gesture = enyo.gesture;
var oldevents = gesture.events;
//
gesture.events.touchstart = function(e) {
// for duration of this touch, only handle touch events. Old event
// structure will be restored during touchend.
gesture.events = touchGesture;
gesture.events.touchstart(e);
};
//
var touchGesture = {
_touchCount: 0,
touchstart: function(inEvent) {
enyo.job.stop("resetGestureEvents");
this._touchCount++;
this.excludedTarget = null;
var e = this.makeEvent(inEvent);
gesture.down(e);
// generate a new event object since over is a different event
e = this.makeEvent(inEvent);
this.overEvent = e;
gesture.over(e);
},
touchmove: function(inEvent) {
enyo.job.stop("resetGestureEvents");
// NOTE: allow user to supply a node to exclude from event
// target finding via the drag event.
var de = gesture.drag.dragEvent;
this.excludedTarget = de && de.dragInfo && de.dragInfo.node;
var e = this.makeEvent(inEvent);
gesture.move(e);
// prevent default document scrolling if enyo.bodyIsFitting == true
// avoid window scrolling by preventing default on this event
// note: this event can be made unpreventable (native scrollers do this)
if (enyo.bodyIsFitting) {
inEvent.preventDefault();
}
// synthesize over and out (normally generated via mouseout)
if (this.overEvent && this.overEvent.target != e.target) {
this.overEvent.relatedTarget = e.target;
e.relatedTarget = this.overEvent.target;
gesture.out(this.overEvent);
gesture.over(e);
}
this.overEvent = e;
},
touchend: function(inEvent) {
gesture.up(this.makeEvent(inEvent));
// NOTE: in touch land, there is no distinction between
// a pointer enter/leave and a drag over/out.
// While it may make sense to send a leave event when a touch
// ends, it does not make sense to send a dragout.
// We avoid this by processing out after up, but
// this ordering is ad hoc.
gesture.out(this.overEvent);
// reset the event handlers back to the mouse-friendly ones after
// a short timeout. We can't do this directly in this handler
// because it messes up Android to handle the mouseup event.
this._touchCount--;
if (this._touchCount === 0) {
enyo.job("resetGestureEvents", function() {
gesture.events = oldevents;
}, 10);
}
},
makeEvent: function(inEvent) {
var e = enyo.clone(inEvent.changedTouches[0]);
e.srcEvent = inEvent;
e.target = this.findTarget(e.clientX, e.clientY);
// normalize "mouse button" info
e.which = 1;
//console.log("target for " + inEvent.type + " at " + e.pageX + ", " + e.pageY + " is " + (e.target ? e.target.id : "none"));
return e;
},
calcNodeOffset: function(inNode) {
if (inNode.getBoundingClientRect) {
var o = inNode.getBoundingClientRect();
return {
left: o.left,
top: o.top,
width: o.width,
height: o.height
};
}
},
findTarget: function(inX, inY) {
return document.elementFromPoint(inX, inY);
},
// NOTE: will find only 1 element under the touch and
// will fail if an element is positioned outside the bounding box of its parent
findTargetTraverse: function(inNode, inX, inY) {
var n = inNode || document.body;
var o = this.calcNodeOffset(n);
if (o && n != this.excludedTarget) {
var x = inX - o.left;
var y = inY - o.top;
//console.log("test: " + n.id + " (left: " + o.left + ", top: " + o.top + ", width: " + o.width + ", height: " + o.height + ")");
if (x>0 && y>0 && x<=o.width && y<=o.height) {
//console.log("IN: " + n.id + " -> [" + x + "," + y + " in " + o.width + "x" + o.height + "] (children: " + n.childNodes.length + ")");
var target;
for (var n$=n.childNodes, i=n$.length-1, c; (c=n$[i]); i--) {
target = this.findTargetTraverse(c, inX, inY);
if (target) {
return target;
}
}
return n;
}
}
},
connect: function() {
enyo.forEach(['ontouchstart', 'ontouchmove', 'ontouchend', 'ongesturestart', 'ongesturechange', 'ongestureend'], function(e) {
document[e] = enyo.dispatch;
});
// use proper target finding technique based on feature detection.
if (enyo.platform.androidChrome <= 18) {
// HACK: on Chrome for Android v18 on devices with higher density displays,
// document.elementFromPoint uses wrong pixel system, so manually scale
var dpr = window.devicePixelRatio;
this.findTarget = function(inX, inY) {
return document.elementFromPoint(inX * dpr, inY * dpr);
};
} else if (!document.elementFromPoint) {
this.findTarget = function(inX, inY) {
return this.findTargetTraverse(null, inX, inY);
};
}
}
};
//
touchGesture.connect();
}); | ENYO-1315: temporary fix to only do event reassignment on desktop Chrome, not on device.
Enyo-DCO-1.0-Signed-Off-By: Ben Combee ([email protected])
| source/touch/touch.js | ENYO-1315: temporary fix to only do event reassignment on desktop Chrome, not on device. | <ide><path>ource/touch/touch.js
<ide> _touchCount: 0,
<ide> touchstart: function(inEvent) {
<ide> enyo.job.stop("resetGestureEvents");
<del> this._touchCount++;
<add> this._touchCount += inEvent.changedTouches.length;
<ide> this.excludedTarget = null;
<ide> var e = this.makeEvent(inEvent);
<ide> gesture.down(e);
<ide> // this ordering is ad hoc.
<ide> gesture.out(this.overEvent);
<ide> // reset the event handlers back to the mouse-friendly ones after
<del> // a short timeout. We can't do this directly in this handler
<add> // a short timeout. We can't do this directly in this handler
<ide> // because it messes up Android to handle the mouseup event.
<del> this._touchCount--;
<del> if (this._touchCount === 0) {
<add> // FIXME: for 2.1 release, conditional on platform being
<add> // desktop Chrome, since we're seeing issues in PhoneGap with this
<add> // code.
<add> this._touchCount -= inEvent.changedTouches.length;
<add> if (enyo.platform.chrome && this._touchCount === 0) {
<ide> enyo.job("resetGestureEvents", function() {
<ide> gesture.events = oldevents;
<ide> }, 10); |
|
JavaScript | mit | efbf89374a3f393cdedf33ea5ab4f0c32438cc19 | 0 | akagetsu/MFElectron,akagetsu/MFElectron | //jshint esversion:6
var app = require('./app/app');
const path = '';
app.run(path); | script.js | //jshint esversion:6
const csv = require('fast-csv'),
path = require('path');
const api = require('./api/api');
let codes = [];
const myPath = path.resolve(__dirname, "clienti.csv");
const endPath = path.resolve(__dirname, "clientiFull.csv");
csv.fromPath(myPath)
.on("data", function (data) {
codes.push(data.pop());
}).on("end", function () {
codes.shift();
getData(codes)
.then(function (fullData) {
return setData(fullData);
})
.then(function () {
console.log("done!");
});
});
const getData = function (codes) {
const bulkData = codes.map(function (code) {
return api.getInfo(code)
.then(function (res) {
return res;
}).catch(function (err) {
return err;
});
});
return Promise.all(bulkData)
.then(function (values) {
return values;
}).catch(function (err) {
return err;
});
};
const setData = function (fullData) {
return new Promise(function (resolve, reject) {
csv.writeToPath(endPath, fullData, {
headers: true
}).on('finish', function () {
resolve();
});
});
}; | Call app from the main script
| script.js | Call app from the main script | <ide><path>cript.js
<ide> //jshint esversion:6
<del>const csv = require('fast-csv'),
<del> path = require('path');
<del>
<del>const api = require('./api/api');
<del>let codes = [];
<del>const myPath = path.resolve(__dirname, "clienti.csv");
<del>const endPath = path.resolve(__dirname, "clientiFull.csv");
<del>
<del>csv.fromPath(myPath)
<del> .on("data", function (data) {
<del> codes.push(data.pop());
<del> }).on("end", function () {
<del> codes.shift();
<del>
<del> getData(codes)
<del> .then(function (fullData) {
<del> return setData(fullData);
<del> })
<del> .then(function () {
<del> console.log("done!");
<del> });
<del>
<del> });
<del>
<del>const getData = function (codes) {
<del> const bulkData = codes.map(function (code) {
<del> return api.getInfo(code)
<del> .then(function (res) {
<del> return res;
<del> }).catch(function (err) {
<del> return err;
<del> });
<del> });
<del>
<del> return Promise.all(bulkData)
<del> .then(function (values) {
<del> return values;
<del> }).catch(function (err) {
<del> return err;
<del> });
<del>};
<del>
<del>const setData = function (fullData) {
<del> return new Promise(function (resolve, reject) {
<del> csv.writeToPath(endPath, fullData, {
<del> headers: true
<del> }).on('finish', function () {
<del> resolve();
<del> });
<del> });
<del>};
<add>var app = require('./app/app');
<add>const path = '';
<add>app.run(path); |
|
Java | apache-2.0 | 6ffb12188913833baa0494f297188d5b2a90ff1e | 0 | consulo/consulo,consulo/consulo,consulo/consulo,consulo/consulo,consulo/consulo,consulo/consulo | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeHighlighting;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.Comparing;
import com.intellij.util.containers.HashMap;
import com.intellij.util.ui.JBUI;
import consulo.platform.base.icon.PlatformIconGroup;
import consulo.ui.color.ColorValue;
import consulo.ui.image.Image;
import consulo.ui.image.ImageEffects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
public class HighlightDisplayLevel {
private static final Map<HighlightSeverity, HighlightDisplayLevel> ourMap = new HashMap<>();
public static final HighlightDisplayLevel GENERIC_SERVER_ERROR_OR_WARNING =
new HighlightDisplayLevel(HighlightSeverity.GENERIC_SERVER_ERROR_OR_WARNING, createBoxIcon(CodeInsightColors.GENERIC_SERVER_ERROR_OR_WARNING));
public static final HighlightDisplayLevel ERROR = new HighlightDisplayLevel(HighlightSeverity.ERROR, createErrorIcon(CodeInsightColors.ERRORS_ATTRIBUTES));
public static final HighlightDisplayLevel WARNING = new HighlightDisplayLevel(HighlightSeverity.WARNING, createWarningIcon(CodeInsightColors.WARNINGS_ATTRIBUTES));
private static final Image DO_NOT_SHOW_KEY = createBoxIcon(TextAttributesKey.createTextAttributesKey("DO_NOT_SHOW"));
public static final HighlightDisplayLevel DO_NOT_SHOW = new HighlightDisplayLevel(HighlightSeverity.INFORMATION, DO_NOT_SHOW_KEY);
/**
* use #WEAK_WARNING instead
*/
@Deprecated
public static final HighlightDisplayLevel INFO = new HighlightDisplayLevel(HighlightSeverity.INFO, DO_NOT_SHOW.getIcon());
public static final HighlightDisplayLevel WEAK_WARNING = new HighlightDisplayLevel(HighlightSeverity.WEAK_WARNING, createWarningIcon(CodeInsightColors.WEAK_WARNING_ATTRIBUTES));
public static final HighlightDisplayLevel NON_SWITCHABLE_ERROR = new HighlightDisplayLevel(HighlightSeverity.ERROR);
@Nullable
private Image myIcon;
private final HighlightSeverity mySeverity;
@Nullable
public static HighlightDisplayLevel find(String name) {
for (Map.Entry<HighlightSeverity, HighlightDisplayLevel> entry : ourMap.entrySet()) {
HighlightSeverity severity = entry.getKey();
HighlightDisplayLevel displayLevel = entry.getValue();
if (Comparing.strEqual(severity.getName(), name)) {
return displayLevel;
}
}
return null;
}
public static HighlightDisplayLevel find(HighlightSeverity severity) {
return ourMap.get(severity);
}
public HighlightDisplayLevel(@Nonnull HighlightSeverity severity, @Nullable Image icon) {
this(severity);
myIcon = icon;
ourMap.put(mySeverity, this);
}
public HighlightDisplayLevel(@Nonnull HighlightSeverity severity) {
mySeverity = severity;
}
@Override
public String toString() {
return mySeverity.toString();
}
@Nonnull
public String getName() {
return mySeverity.getName();
}
@Nullable
public Image getIcon() {
return myIcon;
}
@Nonnull
public HighlightSeverity getSeverity() {
return mySeverity;
}
public static void registerSeverity(@Nonnull HighlightSeverity severity, final TextAttributesKey key, @Nullable Image icon) {
Image severityIcon = icon != null ? icon : createBoxIcon(key);
final HighlightDisplayLevel level = ourMap.get(severity);
if (level == null) {
new HighlightDisplayLevel(severity, severityIcon);
}
else {
level.myIcon = severityIcon;
}
}
@Nonnull
private static ColorValue buildColorValue(@Nonnull TextAttributesKey key) {
return ColorValue.lazy(() -> {
final EditorColorsManager manager = EditorColorsManager.getInstance();
TextAttributes attributes = manager.getGlobalScheme().getAttributes(key);
ColorValue stripe = attributes.getErrorStripeColor();
if (stripe != null) return stripe;
return attributes.getEffectColor();
});
}
public static int getEmptyIconDim() {
return JBUI.scale(14);
}
@Nonnull
public static Image createBoxIcon(@Nonnull TextAttributesKey key) {
return ImageEffects.colorFilled(getEmptyIconDim(), getEmptyIconDim(), buildColorValue(key));
}
@Nonnull
private static Image createErrorIcon(@Nonnull TextAttributesKey textAttributesKey) {
return ImageEffects.colorize(PlatformIconGroup.generalInspectionsError(), buildColorValue(textAttributesKey));
}
@Nonnull
private static Image createWarningIcon(@Nonnull TextAttributesKey textAttributesKey) {
return ImageEffects.colorize(PlatformIconGroup.generalInspectionsWarning(), buildColorValue(textAttributesKey));
}
@Nonnull
public static Image createIconByMask(final ColorValue renderColor) {
return ImageEffects.colorFilled(getEmptyIconDim(), getEmptyIconDim(), renderColor);
}
}
| modules/base/analysis-api/src/main/java/com/intellij/codeHighlighting/HighlightDisplayLevel.java | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeHighlighting;
import com.intellij.icons.AllIcons;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.Comparing;
import com.intellij.util.containers.HashMap;
import com.intellij.util.ui.JBUI;
import consulo.ui.color.ColorValue;
import consulo.ui.image.Image;
import consulo.ui.image.ImageEffects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
public class HighlightDisplayLevel {
private static final Map<HighlightSeverity, HighlightDisplayLevel> ourMap = new HashMap<>();
public static final HighlightDisplayLevel GENERIC_SERVER_ERROR_OR_WARNING =
new HighlightDisplayLevel(HighlightSeverity.GENERIC_SERVER_ERROR_OR_WARNING, createBoxIcon(CodeInsightColors.GENERIC_SERVER_ERROR_OR_WARNING));
public static final HighlightDisplayLevel ERROR = new HighlightDisplayLevel(HighlightSeverity.ERROR, createErrorIcon(CodeInsightColors.ERRORS_ATTRIBUTES));
public static final HighlightDisplayLevel WARNING = new HighlightDisplayLevel(HighlightSeverity.WARNING, createErrorIcon(CodeInsightColors.WARNINGS_ATTRIBUTES));
private static final Image DO_NOT_SHOW_KEY = createBoxIcon(TextAttributesKey.createTextAttributesKey("DO_NOT_SHOW"));
public static final HighlightDisplayLevel DO_NOT_SHOW = new HighlightDisplayLevel(HighlightSeverity.INFORMATION, DO_NOT_SHOW_KEY);
/**
* use #WEAK_WARNING instead
*/
@Deprecated
public static final HighlightDisplayLevel INFO = new HighlightDisplayLevel(HighlightSeverity.INFO, DO_NOT_SHOW.getIcon());
public static final HighlightDisplayLevel WEAK_WARNING = new HighlightDisplayLevel(HighlightSeverity.WEAK_WARNING, createErrorIcon(CodeInsightColors.WEAK_WARNING_ATTRIBUTES));
public static final HighlightDisplayLevel NON_SWITCHABLE_ERROR = new HighlightDisplayLevel(HighlightSeverity.ERROR);
@Nullable
private Image myIcon;
private final HighlightSeverity mySeverity;
@Nullable
public static HighlightDisplayLevel find(String name) {
for (Map.Entry<HighlightSeverity, HighlightDisplayLevel> entry : ourMap.entrySet()) {
HighlightSeverity severity = entry.getKey();
HighlightDisplayLevel displayLevel = entry.getValue();
if (Comparing.strEqual(severity.getName(), name)) {
return displayLevel;
}
}
return null;
}
public static HighlightDisplayLevel find(HighlightSeverity severity) {
return ourMap.get(severity);
}
public HighlightDisplayLevel(@Nonnull HighlightSeverity severity, @Nullable Image icon) {
this(severity);
myIcon = icon;
ourMap.put(mySeverity, this);
}
public HighlightDisplayLevel(@Nonnull HighlightSeverity severity) {
mySeverity = severity;
}
@Override
public String toString() {
return mySeverity.toString();
}
@Nonnull
public String getName() {
return mySeverity.getName();
}
@Nullable
public Image getIcon() {
return myIcon;
}
@Nonnull
public HighlightSeverity getSeverity() {
return mySeverity;
}
public static void registerSeverity(@Nonnull HighlightSeverity severity, final TextAttributesKey key, @Nullable Image icon) {
Image severityIcon = icon != null ? icon : createBoxIcon(key);
final HighlightDisplayLevel level = ourMap.get(severity);
if (level == null) {
new HighlightDisplayLevel(severity, severityIcon);
}
else {
level.myIcon = severityIcon;
}
}
@Nonnull
private static ColorValue buildColorValue(@Nonnull TextAttributesKey key) {
return ColorValue.lazy(() -> {
final EditorColorsManager manager = EditorColorsManager.getInstance();
TextAttributes attributes = manager.getGlobalScheme().getAttributes(key);
ColorValue stripe = attributes.getErrorStripeColor();
if (stripe != null) return stripe;
return attributes.getEffectColor();
});
}
public static int getEmptyIconDim() {
return JBUI.scale(14);
}
@Nonnull
public static Image createBoxIcon(@Nonnull TextAttributesKey key) {
return ImageEffects.colorFilled(getEmptyIconDim(), getEmptyIconDim(), buildColorValue(key));
}
@Nonnull
private static Image createErrorIcon(@Nonnull TextAttributesKey textAttributesKey) {
return ImageEffects.colorize(AllIcons.General.InspectionsError, buildColorValue(textAttributesKey));
}
@Nonnull
public static Image createIconByMask(final ColorValue renderColor) {
return ImageEffects.colorFilled(getEmptyIconDim(), getEmptyIconDim(), renderColor);
}
}
| use warning icon
| modules/base/analysis-api/src/main/java/com/intellij/codeHighlighting/HighlightDisplayLevel.java | use warning icon | <ide><path>odules/base/analysis-api/src/main/java/com/intellij/codeHighlighting/HighlightDisplayLevel.java
<ide> */
<ide> package com.intellij.codeHighlighting;
<ide>
<del>import com.intellij.icons.AllIcons;
<ide> import com.intellij.lang.annotation.HighlightSeverity;
<ide> import com.intellij.openapi.editor.colors.CodeInsightColors;
<ide> import com.intellij.openapi.editor.colors.EditorColorsManager;
<ide> import com.intellij.openapi.util.Comparing;
<ide> import com.intellij.util.containers.HashMap;
<ide> import com.intellij.util.ui.JBUI;
<add>import consulo.platform.base.icon.PlatformIconGroup;
<ide> import consulo.ui.color.ColorValue;
<ide> import consulo.ui.image.Image;
<ide> import consulo.ui.image.ImageEffects;
<ide>
<ide> public static final HighlightDisplayLevel ERROR = new HighlightDisplayLevel(HighlightSeverity.ERROR, createErrorIcon(CodeInsightColors.ERRORS_ATTRIBUTES));
<ide>
<del> public static final HighlightDisplayLevel WARNING = new HighlightDisplayLevel(HighlightSeverity.WARNING, createErrorIcon(CodeInsightColors.WARNINGS_ATTRIBUTES));
<add> public static final HighlightDisplayLevel WARNING = new HighlightDisplayLevel(HighlightSeverity.WARNING, createWarningIcon(CodeInsightColors.WARNINGS_ATTRIBUTES));
<ide>
<ide> private static final Image DO_NOT_SHOW_KEY = createBoxIcon(TextAttributesKey.createTextAttributesKey("DO_NOT_SHOW"));
<ide> public static final HighlightDisplayLevel DO_NOT_SHOW = new HighlightDisplayLevel(HighlightSeverity.INFORMATION, DO_NOT_SHOW_KEY);
<ide> */
<ide> @Deprecated
<ide> public static final HighlightDisplayLevel INFO = new HighlightDisplayLevel(HighlightSeverity.INFO, DO_NOT_SHOW.getIcon());
<del> public static final HighlightDisplayLevel WEAK_WARNING = new HighlightDisplayLevel(HighlightSeverity.WEAK_WARNING, createErrorIcon(CodeInsightColors.WEAK_WARNING_ATTRIBUTES));
<add>
<add> public static final HighlightDisplayLevel WEAK_WARNING = new HighlightDisplayLevel(HighlightSeverity.WEAK_WARNING, createWarningIcon(CodeInsightColors.WEAK_WARNING_ATTRIBUTES));
<ide>
<ide> public static final HighlightDisplayLevel NON_SWITCHABLE_ERROR = new HighlightDisplayLevel(HighlightSeverity.ERROR);
<ide>
<ide>
<ide> @Nonnull
<ide> private static Image createErrorIcon(@Nonnull TextAttributesKey textAttributesKey) {
<del> return ImageEffects.colorize(AllIcons.General.InspectionsError, buildColorValue(textAttributesKey));
<add> return ImageEffects.colorize(PlatformIconGroup.generalInspectionsError(), buildColorValue(textAttributesKey));
<add> }
<add>
<add> @Nonnull
<add> private static Image createWarningIcon(@Nonnull TextAttributesKey textAttributesKey) {
<add> return ImageEffects.colorize(PlatformIconGroup.generalInspectionsWarning(), buildColorValue(textAttributesKey));
<ide> }
<ide>
<ide> @Nonnull |
|
Java | lgpl-2.1 | 68b6ffe38d67a3983e70067f7f87b25ce87b488d | 0 | zwobit/exist,olvidalo/exist,jessealama/exist,wolfgangmm/exist,ljo/exist,shabanovd/exist,zwobit/exist,RemiKoutcherawy/exist,RemiKoutcherawy/exist,ambs/exist,zwobit/exist,dizzzz/exist,ljo/exist,kohsah/exist,olvidalo/exist,joewiz/exist,dizzzz/exist,RemiKoutcherawy/exist,patczar/exist,wshager/exist,kohsah/exist,MjAbuz/exist,adamretter/exist,shabanovd/exist,eXist-db/exist,RemiKoutcherawy/exist,jessealama/exist,ambs/exist,hungerburg/exist,windauer/exist,dizzzz/exist,opax/exist,dizzzz/exist,adamretter/exist,adamretter/exist,eXist-db/exist,jessealama/exist,wshager/exist,zwobit/exist,kohsah/exist,wolfgangmm/exist,windauer/exist,joewiz/exist,windauer/exist,patczar/exist,wolfgangmm/exist,hungerburg/exist,opax/exist,MjAbuz/exist,hungerburg/exist,kohsah/exist,patczar/exist,MjAbuz/exist,dizzzz/exist,lcahlander/exist,kohsah/exist,lcahlander/exist,lcahlander/exist,wolfgangmm/exist,wshager/exist,eXist-db/exist,olvidalo/exist,jessealama/exist,jensopetersen/exist,jensopetersen/exist,hungerburg/exist,zwobit/exist,olvidalo/exist,ljo/exist,wshager/exist,patczar/exist,ljo/exist,jessealama/exist,wolfgangmm/exist,windauer/exist,wolfgangmm/exist,adamretter/exist,opax/exist,shabanovd/exist,joewiz/exist,wshager/exist,windauer/exist,zwobit/exist,adamretter/exist,eXist-db/exist,joewiz/exist,adamretter/exist,ambs/exist,kohsah/exist,RemiKoutcherawy/exist,MjAbuz/exist,opax/exist,shabanovd/exist,ambs/exist,joewiz/exist,opax/exist,lcahlander/exist,ambs/exist,olvidalo/exist,ljo/exist,jensopetersen/exist,patczar/exist,MjAbuz/exist,windauer/exist,patczar/exist,dizzzz/exist,jessealama/exist,shabanovd/exist,jensopetersen/exist,jensopetersen/exist,MjAbuz/exist,hungerburg/exist,lcahlander/exist,RemiKoutcherawy/exist,eXist-db/exist,shabanovd/exist,jensopetersen/exist,wshager/exist,eXist-db/exist,lcahlander/exist,ambs/exist,joewiz/exist,ljo/exist | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-06, Wolfgang M. Meier ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Id$
*/
package org.exist.xquery.value;
import java.text.Collator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.exist.dom.QName;
import org.exist.util.Collations;
import org.exist.util.UTF8;
import org.exist.util.XMLChar;
import org.exist.xquery.Constants;
import org.exist.xquery.XPathException;
public class StringValue extends AtomicValue {
public final static StringValue EMPTY_STRING = new StringValue("");
private final static String langRegex =
"/(([a-z]|[A-Z])([a-z]|[A-Z])|" // ISO639Code
+ "([iI]-([a-z]|[A-Z])+)|" // IanaCode
+ "([xX]-([a-z]|[A-Z])+))" // UserCode
+ "(-([a-z]|[A-Z])+)*/"; // Subcode
private final static Pattern langPattern = Pattern.compile(langRegex);
protected int type = Type.STRING;
protected String value;
public StringValue(String stringValue, int type) throws XPathException {
this.type = type;
if(type == Type.STRING)
this.value = stringValue;
else if(type == Type.NORMALIZED_STRING)
this.value = normalizeWhitespace(stringValue);
else {
this.value = collapseWhitespace(stringValue);
checkType();
}
}
public StringValue(String stringValue) {
value = stringValue;
}
public StringValue expand() throws XPathException {
value = expand(value);
return this;
}
private void checkType() throws XPathException {
switch(type) {
case Type.NORMALIZED_STRING:
case Type.TOKEN:
return;
case Type.LANGUAGE:
Matcher matcher = langPattern.matcher(value);
if (!matcher.matches())
throw new XPathException(
"Type error: string "
+ value
+ " is not valid for type xs:language");
return;
case Type.NAME:
if(!QName.isQName(value))
throw new XPathException("Type error: string " + value + " is not a valid xs:Name");
return;
case Type.NCNAME:
case Type.ID:
case Type.IDREF:
case Type.ENTITY:
if(!XMLChar.isValidNCName(value))
throw new XPathException("Type error: string " + value + " is not a valid " + Type.getTypeName(type));
case Type.NMTOKEN:
if(!XMLChar.isValidNmtoken(value))
throw new XPathException("Type error: string " + value + " is not a valid xs:NMTOKEN");
}
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#getType()
*/
public int getType() {
return type;
}
/* (non-Javadoc)
* @see org.exist.xquery.value.Item#getStringValue()
*/
public String getStringValue() {
return value;
}
public Item itemAt(int pos) {
return pos == 0 ? this : null;
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#convertTo(int)
*/
public AtomicValue convertTo(int requiredType) throws XPathException {
switch (requiredType) {
//TODO : should we allow these 2 type under-promotions ?
case Type.ATOMIC :
case Type.ITEM :
case Type.STRING :
return this;
case Type.NORMALIZED_STRING:
case Type.TOKEN:
case Type.LANGUAGE:
case Type.NMTOKEN:
case Type.NAME:
case Type.NCNAME:
case Type.ID:
case Type.IDREF:
case Type.ENTITY:
return new StringValue(value, requiredType);
case Type.ANY_URI :
return new AnyURIValue(value);
case Type.BOOLEAN :
String trimmed = trimWhitespace(value);
if (trimmed.equals("0") || trimmed.equals("false"))
return BooleanValue.FALSE;
else if (trimmed.equals("1") || trimmed.equals("true"))
return BooleanValue.TRUE;
else
throw new XPathException(
"cannot convert string '" + value + "' to boolean");
case Type.FLOAT :
return new FloatValue(value);
case Type.DOUBLE :
case Type.NUMBER :
return new DoubleValue(this);
case Type.DECIMAL :
return new DecimalValue(value);
case Type.INTEGER :
case Type.NON_POSITIVE_INTEGER :
case Type.NEGATIVE_INTEGER :
case Type.POSITIVE_INTEGER :
case Type.LONG :
case Type.INT :
case Type.SHORT :
case Type.BYTE :
case Type.NON_NEGATIVE_INTEGER :
case Type.UNSIGNED_LONG :
case Type.UNSIGNED_INT :
case Type.UNSIGNED_SHORT :
case Type.UNSIGNED_BYTE :
return new IntegerValue(value, requiredType);
case Type.BASE64_BINARY :
return new Base64Binary(value);
case Type.HEX_BINARY :
return new HexBinary(value);
case Type.DATE_TIME :
return new DateTimeValue(value);
case Type.TIME :
return new TimeValue(value);
case Type.DATE :
return new DateValue(value);
case Type.DURATION :
return new DurationValue(value);
case Type.YEAR_MONTH_DURATION :
return new YearMonthDurationValue(value);
case Type.DAY_TIME_DURATION :
return new DayTimeDurationValue(value);
case Type.GYEAR :
return new GYearValue(value);
case Type.GMONTH :
return new GMonthValue(value);
case Type.GDAY :
return new GDayValue(value);
case Type.GYEARMONTH :
return new GYearMonthValue(value);
case Type.GMONTHDAY :
return new GMonthDayValue(value);
case Type.UNTYPED_ATOMIC :
return new UntypedAtomicValue(getStringValue());
case Type.QNAME :
return new QNameValue(null, new QName(value));
default :
throw new XPathException("FORG0001: cannot cast '" +
Type.getTypeName(this.getItemType()) + "(\"" + getStringValue() + "\")' to " +
Type.getTypeName(requiredType));
}
}
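/**
* Returns a preference value for converting this value to the given Java class:
* lower numbers mean a better match, Integer.MAX_VALUE means no sensible conversion.
*/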
public int conversionPreference(Class javaClass) {
if(javaClass.isAssignableFrom(StringValue.class)) return 0;
if(javaClass == String.class || javaClass == CharSequence.class) return 1;
if(javaClass == Character.class || javaClass == char.class) return 2;
if(javaClass == Double.class || javaClass == double.class) return 10;
if(javaClass == Float.class || javaClass == float.class) return 11;
if(javaClass == Long.class || javaClass == long.class) return 12;
if(javaClass == Integer.class || javaClass == int.class) return 13;
if(javaClass == Short.class || javaClass == short.class) return 14;
if(javaClass == Byte.class || javaClass == byte.class) return 15;
if(javaClass == Boolean.class || javaClass == boolean.class) return 16;
if(javaClass == Object.class) return 20;
return Integer.MAX_VALUE;
}
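/**
* Converts this value to an instance of the requested Java class; numeric targets
* go through convertTo(), booleans use the effective boolean value, and only
* single-character strings can become a char or Character.
*/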
public Object toJavaObject(Class target) throws XPathException {
if(target.isAssignableFrom(StringValue.class))
return this;
else if(target == Object.class || target == String.class || target == CharSequence.class)
return value;
else if(target == double.class || target == Double.class) {
DoubleValue v = (DoubleValue)convertTo(Type.DOUBLE);
return new Double(v.getValue());
} else if(target == float.class || target == Float.class) {
FloatValue v = (FloatValue)convertTo(Type.FLOAT);
return new Float(v.value);
} else if(target == long.class || target == Long.class) {
IntegerValue v = (IntegerValue)convertTo(Type.LONG);
return new Long(v.getInt());
} else if(target == int.class || target == Integer.class) {
IntegerValue v = (IntegerValue)convertTo(Type.INT);
return new Integer(v.getInt());
} else if(target == short.class || target == Short.class) {
IntegerValue v = (IntegerValue)convertTo(Type.SHORT);
return new Short((short)v.getInt());
} else if(target == byte.class || target == Byte.class) {
IntegerValue v = (IntegerValue)convertTo(Type.BYTE);
return new Byte((byte)v.getInt());
} else if(target == boolean.class || target == Boolean.class) {
return Boolean.valueOf(effectiveBooleanValue());
} else if(target == char.class || target == Character.class) {
if(value.length() > 1 || value.length() == 0)
throw new XPathException("cannot convert string with length = 0 or length > 1 to Java character");
return new Character(value.charAt(0));
}
throw new XPathException("cannot convert value of type " + Type.getTypeName(type) +
" to Java object of type " + target.getName());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#compareTo(int, org.exist.xquery.value.AtomicValue)
*/
public boolean compareTo(Collator collator, int operator, AtomicValue other) throws XPathException {
//if (Type.subTypeOf(other.getType(), Type.STRING)) {
int cmp = Collations.compare(collator, value, other.getStringValue());
switch (operator) {
case Constants.EQ :
return cmp == 0;
case Constants.NEQ :
return cmp != 0;
case Constants.LT :
return cmp < 0;
case Constants.LTEQ :
return cmp <= 0;
case Constants.GT :
return cmp > 0;
case Constants.GTEQ :
return cmp >= 0;
default :
throw new XPathException("Type error: cannot apply operand to string value");
}
//}
//throw new XPathException(
// "Type error: operands are not comparable; expected xs:string; got "
// + Type.getTypeName(other.getType()));
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#compareTo(org.exist.xquery.value.AtomicValue)
*/
public int compareTo(Collator collator, AtomicValue other) throws XPathException {
return Collations.compare(collator, value, other.getStringValue());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#startsWith(org.exist.xquery.value.AtomicValue)
*/
public boolean startsWith(Collator collator, AtomicValue other) throws XPathException {
return Collations.startsWith(collator, value, other.getStringValue());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#endsWith(org.exist.xquery.value.AtomicValue)
*/
public boolean endsWith(Collator collator, AtomicValue other) throws XPathException {
return Collations.endsWith(collator, value, other.getStringValue());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#contains(org.exist.xquery.value.AtomicValue)
*/
public boolean contains(Collator collator, AtomicValue other) throws XPathException {
return Collations.indexOf(collator, value, other.getStringValue()) != Constants.STRING_NOT_FOUND;
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#effectiveBooleanValue()
*/
public boolean effectiveBooleanValue() throws XPathException {
return value.length() > 0;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
public String toString() {
return value;
}
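/**
* Replaces every tab, carriage return and line feed in the sequence with a
* single space character (xs:normalizedString semantics).
*/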
public final static String normalizeWhitespace(CharSequence seq) {
StringBuffer copy = new StringBuffer(seq.length());
char ch;
for (int i = 0; i < seq.length(); i++) {
ch = seq.charAt(i);
switch (ch) {
case '\n' :
case '\r' :
case '\t' :
copy.append(' ');
break;
default :
copy.append(ch);
}
}
return copy.toString();
}
/**
* Collapses all sequences of adjacent whitespace chars in the input string
* into a single space.
*
* @param in the input character sequence
*/
public static String collapseWhitespace(CharSequence in) {
if (in.length() == 0) {
return in.toString();
}
int i = 0;
// this method is performance critical, so first test if we need to collapse at all
for (; i < in.length(); i++) {
char c = in.charAt(i);
if(XMLChar.isSpace(c)) {
if(i + 1 < in.length() && XMLChar.isSpace(in.charAt(i + 1)))
break;
}
}
if(i == in.length())
// no whitespace to collapse, just return
return in.toString();
// start to collapse whitespace
StringBuffer sb = new StringBuffer(in.length());
sb.append(in.subSequence(0, i + 1).toString());
boolean inWhitespace = true;
for (; i < in.length(); i++) {
char c = in.charAt(i);
if(XMLChar.isSpace(c)) {
if (inWhitespace) {
// remove the whitespace
} else {
sb.append(' ');
inWhitespace = true;
}
} else {
sb.append(c);
inWhitespace = false;
}
}
if (sb.charAt(sb.length() - 1) == ' ') {
sb.deleteCharAt(sb.length() - 1);
}
return sb.toString();
}
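/**
* Removes leading and trailing characters with a code point of 0x20 or below
* (space and ASCII control characters) from the string.
*/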
public final static String trimWhitespace(String in) {
if (in.length()==0) {
return in;
}
int first = 0;
int last = in.length() - 1;
while (in.charAt(first) <= 0x20) {
if (first++ >= last) {
return "";
}
}
while (in.charAt(last) <= 0x20) {
last--;
}
return in.substring(first, last+1);
}
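/**
* Expands predefined entity references (amp, lt, gt, quot, apos) and numeric
* character references (decimal or x-prefixed hexadecimal) in the sequence.
* Carriage returns are normalized as well.
*/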
public final static String expand(CharSequence seq) throws XPathException {
StringBuffer buf = new StringBuffer(seq.length());
StringBuffer entityRef = null;
char ch;
for (int i = 0; i < seq.length(); i++) {
ch = seq.charAt(i);
switch (ch) {
case '&' :
if (entityRef == null)
entityRef = new StringBuffer();
else
entityRef.setLength(0);
if ((i+1)==seq.length()) {
throw new XPathException("XPST0003 : Ampersands (&) must be escaped.");
}
if ((i+2)==seq.length()) {
throw new XPathException("XPST0003 : Ampersands (&) must be escaped (missing ;).");
}
ch = seq.charAt(i+1);
if (ch!='#') {
if (!Character.isLetter(ch)) {
throw new XPathException("XPST0003 : Ampersands (&) must be escaped (following character was not a name start character).");
}
entityRef.append(ch);
boolean found = false;
for (int j = i + 2; j < seq.length(); j++) {
ch = seq.charAt(j);
if (ch != ';' && (ch=='.' || ch=='_' || ch=='-' || Character.isLetterOrDigit(ch))) {
entityRef.append(ch);
} else if (ch==';') {
found = true;
i = j;
break;
} else {
break;
}
}
if (found) {
buf.append(expandEntity(entityRef.toString()));
} else {
throw new XPathException("XPST0003 : Invalid character in entity name ("+ch+") or missing ;");
}
} else {
entityRef.append(ch);
ch = seq.charAt(i+2);
boolean found = false;
if (ch=='x') {
entityRef.append(ch);
// hex number
for (int j = i + 3; j < seq.length(); j++) {
ch = seq.charAt(j);
if (ch != ';' && (ch=='0' || ch=='1' || ch=='2' || ch=='3' || ch=='4' || ch=='5' || ch=='6' || ch=='7' || ch=='8' || ch=='9' ||
ch=='a' || ch=='b' || ch=='c' || ch=='d' || ch=='e' || ch=='f' ||
ch=='A' || ch=='B' || ch=='C' || ch=='D' || ch=='E' || ch=='F')) {
entityRef.append(ch);
} else if (ch==';') {
found = true;
i = j;
break;
} else {
break;
}
}
} else {
// decimal number
for (int j = i + 2; j < seq.length(); j++) {
ch = seq.charAt(j);
if (ch != ';' && (ch=='0' || ch=='1' || ch=='2' || ch=='3' || ch=='4' || ch=='5' || ch=='6' || ch=='7' || ch=='8' || ch=='9')) {
entityRef.append(ch);
} else if (ch==';') {
found = true;
i = j;
break;
} else {
break;
}
}
}
if (found) {
buf.append(expandEntity(entityRef.toString()));
} else {
throw new XPathException("XPST0003 : Invalid character in character reference ("+ch+") or missing ;");
}
}
break;
case '\r':
// drop carriage returns
if ((i+1)!=seq.length()) {
ch = seq.charAt(i+1);
if (ch!='\n') {
buf.append('\n');
}
}
break;
default :
buf.append(ch);
}
}
return buf.toString();
}
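/** Resolves a single entity name (amp, lt, gt, quot, apos) or a #-prefixed
* character reference to the character it denotes. */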
private final static char expandEntity(String buf) throws XPathException {
if (buf.equals("amp"))
return '&';
else if (buf.equals("lt"))
return '<';
else if (buf.equals("gt"))
return '>';
else if (buf.equals("quot"))
return '"';
else if (buf.equals("apos"))
return '\'';
else if (buf.length() > 1 && buf.charAt(0) == '#')
return expandCharRef(buf.substring(1));
else
throw new XPathException("Unknown entity reference: " + buf);
}
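/** Parses a decimal or x-prefixed hexadecimal character reference into a char;
* character number zero is rejected. */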
private final static char expandCharRef(String buf) throws XPathException {
try {
int charNumber;
if (buf.length() > 1 && buf.charAt(0) == 'x') {
// Hex
charNumber = Integer.parseInt(buf.substring(1), 16);
} else {
charNumber = Integer.parseInt(buf);
}
if (charNumber==0) {
throw new XPathException("XQST0090 : Character number zero (0) is not allowed.");
}
return (char)charNumber;
} catch (NumberFormatException e) {
throw new XPathException("Unknown character reference: " + buf);
}
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#max(org.exist.xquery.value.AtomicValue)
*/
public AtomicValue max(Collator collator, AtomicValue other) throws XPathException {
if (Type.subTypeOf(other.getType(), Type.STRING))
return Collations.compare(collator, value, ((StringValue) other).value) > 0 ? this : other;
else
return Collations.compare(collator, value, ((StringValue) other.convertTo(getType())).value) > 0
? this
: other;
}
public AtomicValue min(Collator collator, AtomicValue other) throws XPathException {
if (Type.subTypeOf(other.getType(), Type.STRING))
return Collations.compare(collator, value, ((StringValue) other).value) < 0 ? this : other;
else
return Collations.compare(collator, value, ((StringValue) other.convertTo(getType())).value) < 0
? this
: other;
}
/* (non-Javadoc)
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
public int compareTo(Object o) {
AtomicValue other = (AtomicValue)o;
if(Type.subTypeOf(other.getType(), Type.STRING))
return value.compareTo(((StringValue)other).value);
else
return getType() > other.getType() ? 1 : -1;
}
/** Serialize this value for persistent storage.
* @param offset number of leading bytes to reserve in the returned byte array
* @param caseSensitive if false, the value is lower-cased before it is encoded
* */
public byte[] serializeValue( int offset, boolean caseSensitive) {
final String val = caseSensitive ? value : value.toLowerCase();
final byte[] data = new byte[ offset + 1 + UTF8.encoded(val) ];
data[offset] = (byte) type; // TODO: cast to byte is not safe
UTF8.encode(val, data, offset+1);
return data;
}
}
| src/org/exist/xquery/value/StringValue.java | /*
* eXist Open Source Native XML Database
* Copyright (C) 2001-06, Wolfgang M. Meier ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Id$
*/
package org.exist.xquery.value;
import java.text.Collator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.exist.dom.QName;
import org.exist.util.Collations;
import org.exist.util.UTF8;
import org.exist.util.XMLChar;
import org.exist.xquery.Constants;
import org.exist.xquery.XPathException;
public class StringValue extends AtomicValue {
public final static StringValue EMPTY_STRING = new StringValue("");
private final static String langRegex =
"/(([a-z]|[A-Z])([a-z]|[A-Z])|" // ISO639Code
+ "([iI]-([a-z]|[A-Z])+)|" // IanaCode
+ "([xX]-([a-z]|[A-Z])+))" // UserCode
+ "(-([a-z]|[A-Z])+)*/"; // Subcode
private final static Pattern langPattern = Pattern.compile(langRegex);
protected int type = Type.STRING;
protected String value;
public StringValue(String stringValue, int type) throws XPathException {
this.type = type;
if(type == Type.STRING)
this.value = stringValue;
else if(type == Type.NORMALIZED_STRING)
this.value = normalizeWhitespace(stringValue);
else {
this.value = collapseWhitespace(stringValue);
checkType();
}
}
public StringValue(String stringValue) {
value = stringValue;
}
public StringValue expand() throws XPathException {
value = expand(value);
return this;
}
private void checkType() throws XPathException {
switch(type) {
case Type.NORMALIZED_STRING:
case Type.TOKEN:
return;
case Type.LANGUAGE:
Matcher matcher = langPattern.matcher(value);
if (!matcher.matches())
throw new XPathException(
"Type error: string "
+ value
+ " is not valid for type xs:language");
return;
case Type.NAME:
if(!QName.isQName(value))
throw new XPathException("Type error: string " + value + " is not a valid xs:Name");
return;
case Type.NCNAME:
case Type.ID:
case Type.IDREF:
case Type.ENTITY:
if(!XMLChar.isValidNCName(value))
throw new XPathException("Type error: string " + value + " is not a valid " + Type.getTypeName(type));
case Type.NMTOKEN:
if(!XMLChar.isValidNmtoken(value))
throw new XPathException("Type error: string " + value + " is not a valid xs:NMTOKEN");
}
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#getType()
*/
public int getType() {
return Type.STRING;
}
/* (non-Javadoc)
* @see org.exist.xquery.value.Item#getStringValue()
*/
public String getStringValue() {
return value;
}
public Item itemAt(int pos) {
return pos == 0 ? this : null;
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#convertTo(int)
*/
public AtomicValue convertTo(int requiredType) throws XPathException {
switch (requiredType) {
//TODO : should we allow these 2 type under-promotions ?
case Type.ATOMIC :
case Type.ITEM :
case Type.STRING :
return this;
case Type.NORMALIZED_STRING:
case Type.TOKEN:
case Type.LANGUAGE:
case Type.NMTOKEN:
case Type.NAME:
case Type.NCNAME:
case Type.ID:
case Type.IDREF:
case Type.ENTITY:
return new StringValue(value, requiredType);
case Type.ANY_URI :
return new AnyURIValue(value);
case Type.BOOLEAN :
String trimmed = trimWhitespace(value);
if (trimmed.equals("0") || trimmed.equals("false"))
return BooleanValue.FALSE;
else if (trimmed.equals("1") || trimmed.equals("true"))
return BooleanValue.TRUE;
else
throw new XPathException(
"cannot convert string '" + value + "' to boolean");
case Type.FLOAT :
return new FloatValue(value);
case Type.DOUBLE :
case Type.NUMBER :
return new DoubleValue(this);
case Type.DECIMAL :
return new DecimalValue(value);
case Type.INTEGER :
case Type.NON_POSITIVE_INTEGER :
case Type.NEGATIVE_INTEGER :
case Type.POSITIVE_INTEGER :
case Type.LONG :
case Type.INT :
case Type.SHORT :
case Type.BYTE :
case Type.NON_NEGATIVE_INTEGER :
case Type.UNSIGNED_LONG :
case Type.UNSIGNED_INT :
case Type.UNSIGNED_SHORT :
case Type.UNSIGNED_BYTE :
return new IntegerValue(value, requiredType);
case Type.BASE64_BINARY :
return new Base64Binary(value);
case Type.HEX_BINARY :
return new HexBinary(value);
case Type.DATE_TIME :
return new DateTimeValue(value);
case Type.TIME :
return new TimeValue(value);
case Type.DATE :
return new DateValue(value);
case Type.DURATION :
return new DurationValue(value);
case Type.YEAR_MONTH_DURATION :
return new YearMonthDurationValue(value);
case Type.DAY_TIME_DURATION :
return new DayTimeDurationValue(value);
case Type.GYEAR :
return new GYearValue(value);
case Type.GMONTH :
return new GMonthValue(value);
case Type.GDAY :
return new GDayValue(value);
case Type.GYEARMONTH :
return new GYearMonthValue(value);
case Type.GMONTHDAY :
return new GMonthDayValue(value);
case Type.UNTYPED_ATOMIC :
return new UntypedAtomicValue(getStringValue());
case Type.QNAME :
return new QNameValue(null, new QName(value));
default :
throw new XPathException("FORG0001: cannot cast '" +
Type.getTypeName(this.getItemType()) + "(\"" + getStringValue() + "\")' to " +
Type.getTypeName(requiredType));
}
}
public int conversionPreference(Class javaClass) {
if(javaClass.isAssignableFrom(StringValue.class)) return 0;
if(javaClass == String.class || javaClass == CharSequence.class) return 1;
if(javaClass == Character.class || javaClass == char.class) return 2;
if(javaClass == Double.class || javaClass == double.class) return 10;
if(javaClass == Float.class || javaClass == float.class) return 11;
if(javaClass == Long.class || javaClass == long.class) return 12;
if(javaClass == Integer.class || javaClass == int.class) return 13;
if(javaClass == Short.class || javaClass == short.class) return 14;
if(javaClass == Byte.class || javaClass == byte.class) return 15;
if(javaClass == Boolean.class || javaClass == boolean.class) return 16;
if(javaClass == Object.class) return 20;
return Integer.MAX_VALUE;
}
public Object toJavaObject(Class target) throws XPathException {
if(target.isAssignableFrom(StringValue.class))
return this;
else if(target == Object.class || target == String.class || target == CharSequence.class)
return value;
else if(target == double.class || target == Double.class) {
DoubleValue v = (DoubleValue)convertTo(Type.DOUBLE);
return new Double(v.getValue());
} else if(target == float.class || target == Float.class) {
FloatValue v = (FloatValue)convertTo(Type.FLOAT);
return new Float(v.value);
} else if(target == long.class || target == Long.class) {
IntegerValue v = (IntegerValue)convertTo(Type.LONG);
return new Long(v.getInt());
} else if(target == int.class || target == Integer.class) {
IntegerValue v = (IntegerValue)convertTo(Type.INT);
return new Integer(v.getInt());
} else if(target == short.class || target == Short.class) {
IntegerValue v = (IntegerValue)convertTo(Type.SHORT);
return new Short((short)v.getInt());
} else if(target == byte.class || target == Byte.class) {
IntegerValue v = (IntegerValue)convertTo(Type.BYTE);
return new Byte((byte)v.getInt());
} else if(target == boolean.class || target == Boolean.class) {
return Boolean.valueOf(effectiveBooleanValue());
} else if(target == char.class || target == Character.class) {
if(value.length() > 1 || value.length() == 0)
throw new XPathException("cannot convert string with length = 0 or length > 1 to Java character");
return new Character(value.charAt(0));
}
throw new XPathException("cannot convert value of type " + Type.getTypeName(type) +
" to Java object of type " + target.getName());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#compareTo(int, org.exist.xquery.value.AtomicValue)
*/
public boolean compareTo(Collator collator, int operator, AtomicValue other) throws XPathException {
//if (Type.subTypeOf(other.getType(), Type.STRING)) {
int cmp = Collations.compare(collator, value, other.getStringValue());
switch (operator) {
case Constants.EQ :
return cmp == 0;
case Constants.NEQ :
return cmp != 0;
case Constants.LT :
return cmp < 0;
case Constants.LTEQ :
return cmp <= 0;
case Constants.GT :
return cmp > 0;
case Constants.GTEQ :
return cmp >= 0;
default :
throw new XPathException("Type error: cannot apply operand to string value");
}
//}
//throw new XPathException(
// "Type error: operands are not comparable; expected xs:string; got "
// + Type.getTypeName(other.getType()));
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#compareTo(org.exist.xquery.value.AtomicValue)
*/
public int compareTo(Collator collator, AtomicValue other) throws XPathException {
return Collations.compare(collator, value, other.getStringValue());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#startsWith(org.exist.xquery.value.AtomicValue)
*/
public boolean startsWith(Collator collator, AtomicValue other) throws XPathException {
return Collations.startsWith(collator, value, other.getStringValue());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#endsWith(org.exist.xquery.value.AtomicValue)
*/
public boolean endsWith(Collator collator, AtomicValue other) throws XPathException {
return Collations.endsWith(collator, value, other.getStringValue());
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#contains(org.exist.xquery.value.AtomicValue)
*/
public boolean contains(Collator collator, AtomicValue other) throws XPathException {
return Collations.indexOf(collator, value, other.getStringValue()) != Constants.STRING_NOT_FOUND;
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#effectiveBooleanValue()
*/
public boolean effectiveBooleanValue() throws XPathException {
return value.length() > 0;
}
/* (non-Javadoc)
* @see java.lang.Object#toString()
*/
public String toString() {
return value;
}
public final static String normalizeWhitespace(CharSequence seq) {
StringBuffer copy = new StringBuffer(seq.length());
char ch;
for (int i = 0; i < seq.length(); i++) {
ch = seq.charAt(i);
switch (ch) {
case '\n' :
case '\r' :
case '\t' :
copy.append(' ');
break;
default :
copy.append(ch);
}
}
return copy.toString();
}
/**
* Collapses all sequences of adjacent whitespace chars in the input string
* into a single space.
*
* @param in
*/
public static String collapseWhitespace(CharSequence in) {
if (in.length() == 0) {
return in.toString();
}
int i = 0;
// this method is performance critical, so first test if we need to collapse at all
for (; i < in.length(); i++) {
char c = in.charAt(i);
if(XMLChar.isSpace(c)) {
if(i + 1 < in.length() && XMLChar.isSpace(in.charAt(i + 1)))
break;
}
}
if(i == in.length())
// no whitespace to collapse, just return
return in.toString();
// start to collapse whitespace
StringBuffer sb = new StringBuffer(in.length());
sb.append(in.subSequence(0, i + 1).toString());
boolean inWhitespace = true;
for (; i < in.length(); i++) {
char c = in.charAt(i);
if(XMLChar.isSpace(c)) {
if (inWhitespace) {
// remove the whitespace
} else {
sb.append(' ');
inWhitespace = true;
}
} else {
sb.append(c);
inWhitespace = false;
}
}
if (sb.charAt(sb.length() - 1) == ' ') {
sb.deleteCharAt(sb.length() - 1);
}
return sb.toString();
}
public final static String trimWhitespace(String in) {
if (in.length()==0) {
return in;
}
int first = 0;
int last = in.length() - 1;
while (in.charAt(first) <= 0x20) {
if (first++ >= last) {
return "";
}
}
while (in.charAt(last) <= 0x20) {
last--;
}
return in.substring(first, last+1);
}
public final static String expand(CharSequence seq) throws XPathException {
StringBuffer buf = new StringBuffer(seq.length());
StringBuffer entityRef = null;
char ch;
for (int i = 0; i < seq.length(); i++) {
ch = seq.charAt(i);
switch (ch) {
case '&' :
if (entityRef == null)
entityRef = new StringBuffer();
else
entityRef.setLength(0);
if ((i+1)==seq.length()) {
throw new XPathException("XPST0003 : Ampersands (&) must be escaped.");
}
if ((i+2)==seq.length()) {
throw new XPathException("XPST0003 : Ampersands (&) must be escaped (missing ;).");
}
ch = seq.charAt(i+1);
if (ch!='#') {
if (!Character.isLetter(ch)) {
throw new XPathException("XPST0003 : Ampersands (&) must be escaped (following character was not a name start character).");
}
entityRef.append(ch);
boolean found = false;
for (int j = i + 2; j < seq.length(); j++) {
ch = seq.charAt(j);
if (ch != ';' && (ch=='.' || ch=='_' || ch=='-' || Character.isLetterOrDigit(ch))) {
entityRef.append(ch);
} else if (ch==';') {
found = true;
i = j;
break;
} else {
break;
}
}
if (found) {
buf.append(expandEntity(entityRef.toString()));
} else {
throw new XPathException("XPST0003 : Invalid character in entity name ("+ch+") or missing ;");
}
} else {
entityRef.append(ch);
ch = seq.charAt(i+2);
boolean found = false;
if (ch=='x') {
entityRef.append(ch);
// hex number
for (int j = i + 3; j < seq.length(); j++) {
ch = seq.charAt(j);
if (ch != ';' && (ch=='0' || ch=='1' || ch=='2' || ch=='3' || ch=='4' || ch=='5' || ch=='6' || ch=='7' || ch=='8' || ch=='9' ||
ch=='a' || ch=='b' || ch=='c' || ch=='d' || ch=='e' || ch=='f' ||
ch=='A' || ch=='B' || ch=='C' || ch=='D' || ch=='E' || ch=='F')) {
entityRef.append(ch);
} else if (ch==';') {
found = true;
i = j;
break;
} else {
break;
}
}
} else {
// decimal number
for (int j = i + 2; j < seq.length(); j++) {
ch = seq.charAt(j);
if (ch != ';' && (ch=='0' || ch=='1' || ch=='2' || ch=='3' || ch=='4' || ch=='5' || ch=='6' || ch=='7' || ch=='8' || ch=='9')) {
entityRef.append(ch);
} else if (ch==';') {
found = true;
i = j;
break;
} else {
break;
}
}
}
if (found) {
buf.append(expandEntity(entityRef.toString()));
} else {
throw new XPathException("XPST0003 : Invalid character in character reference ("+ch+") or missing ;");
}
}
break;
case '\r':
// drop carriage returns
if ((i+1)!=seq.length()) {
ch = seq.charAt(i+1);
if (ch!='\n') {
buf.append('\n');
}
}
break;
default :
buf.append(ch);
}
}
return buf.toString();
}
private final static char expandEntity(String buf) throws XPathException {
if (buf.equals("amp"))
return '&';
else if (buf.equals("lt"))
return '<';
else if (buf.equals("gt"))
return '>';
else if (buf.equals("quot"))
return '"';
else if (buf.equals("apos"))
return '\'';
else if (buf.length() > 1 && buf.charAt(0) == '#')
return expandCharRef(buf.substring(1));
else
throw new XPathException("Unknown entity reference: " + buf);
}
private final static char expandCharRef(String buf) throws XPathException {
try {
int charNumber;
if (buf.length() > 1 && buf.charAt(0) == 'x') {
// Hex
charNumber = Integer.parseInt(buf.substring(1), 16);
} else {
charNumber = Integer.parseInt(buf);
}
if (charNumber==0) {
throw new XPathException("XQST0090 : Character number zero (0) is not allowed.");
}
return (char)charNumber;
} catch (NumberFormatException e) {
throw new XPathException("Unknown character reference: " + buf);
}
}
/* (non-Javadoc)
* @see org.exist.xquery.value.AtomicValue#max(org.exist.xquery.value.AtomicValue)
*/
public AtomicValue max(Collator collator, AtomicValue other) throws XPathException {
if (Type.subTypeOf(other.getType(), Type.STRING))
return Collations.compare(collator, value, ((StringValue) other).value) > 0 ? this : other;
else
return Collations.compare(collator, value, ((StringValue) other.convertTo(getType())).value) > 0
? this
: other;
}
public AtomicValue min(Collator collator, AtomicValue other) throws XPathException {
if (Type.subTypeOf(other.getType(), Type.STRING))
return Collations.compare(collator, value, ((StringValue) other).value) < 0 ? this : other;
else
return Collations.compare(collator, value, ((StringValue) other.convertTo(getType())).value) < 0
? this
: other;
}
/* (non-Javadoc)
* @see java.lang.Comparable#compareTo(java.lang.Object)
*/
public int compareTo(Object o) {
AtomicValue other = (AtomicValue)o;
if(Type.subTypeOf(other.getType(), Type.STRING))
return value.compareTo(((StringValue)other).value);
else
return getType() > other.getType() ? 1 : -1;
}
/** Serialize this value for persistent storage.
* @param offset number of leading bytes to reserve in the returned byte array
* @param caseSensitive if false, the value is lower-cased before it is encoded
* */
public byte[] serializeValue( int offset, boolean caseSensitive) {
final String val = caseSensitive ? value : value.toLowerCase();
final byte[] data = new byte[ offset + 1 + UTF8.encoded(val) ];
data[offset] = (byte) type; // TODO: cast to byte is not safe
UTF8.encode(val, data, offset+1);
return data;
}
}
| Fixed return type of String-derived atomic values.
svn path=/trunk/eXist/; revision=5452
| src/org/exist/xquery/value/StringValue.java | Fixed return type of String-derived atomic values. | <ide><path>rc/org/exist/xquery/value/StringValue.java
<ide> * @see org.exist.xquery.value.AtomicValue#getType()
<ide> */
<ide> public int getType() {
<del> return Type.STRING;
<add> return type;
<ide> }
<ide>
<ide> /* (non-Javadoc) |
|
Java | mit | 25935b84df18f20fe1c8f95a0701ccd594d537fd | 0 | hydrated/SwipeRevealLayout,chthai64/SwipeRevealLayout | /**
The MIT License (MIT)
Copyright (c) 2016 Chau Thai
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package com.chauthai.swipereveallayout;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Rect;
import android.support.v4.view.GestureDetectorCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
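/**
* A ViewGroup with two children: a main view shown when closed and a secondary view
* that is revealed when the main view is dragged away from one of the four edges.
*/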
@SuppressLint("RtlHardcoded")
public class SwipeRevealLayout extends ViewGroup {
// These states are used only for ViewBinderHelper
protected static final int STATE_CLOSE = 0;
protected static final int STATE_CLOSING = 1;
protected static final int STATE_OPEN = 2;
protected static final int STATE_OPENING = 3;
protected static final int STATE_DRAGGING = 4;
private static final int DEFAULT_MIN_FLING_VELOCITY = 300; // dp per second
public static final int DRAG_EDGE_LEFT = 0x1;
public static final int DRAG_EDGE_RIGHT = 0x1 << 1;
public static final int DRAG_EDGE_TOP = 0x1 << 2;
public static final int DRAG_EDGE_BOTTOM = 0x1 << 3;
/**
* The secondary view will be under the main view.
*/
public static final int MODE_NORMAL = 0;
/**
* The secondary view will stick to the edge of the main view.
*/
public static final int MODE_SAME_LEVEL = 1;
/**
* Main view is the view which is shown when the layout is closed.
*/
private View mMainView;
/**
* Secondary view is the view which is shown when the layout is opened.
*/
private View mSecondaryView;
/**
* The rectangle position of the main view when the layout is closed.
*/
private Rect mRectMainClose = new Rect();
/**
* The rectangle position of the main view when the layout is opened.
*/
private Rect mRectMainOpen = new Rect();
/**
* The rectangle position of the secondary view when the layout is closed.
*/
private Rect mRectSecClose = new Rect();
/**
* The rectangle position of the secondary view when the layout is opened.
*/
private Rect mRectSecOpen = new Rect();
private boolean mIsOpenBeforeInit = false;
private volatile boolean mAborted = false;
private volatile boolean mIsScrolling = false;
private volatile boolean mLockDrag = false;
private int mMinFlingVelocity = DEFAULT_MIN_FLING_VELOCITY;
private int mState = STATE_CLOSE;
private int mMode = MODE_NORMAL;
private int mLastMainLeft = 0;
private int mLastMainTop = 0;
private int mDragEdge = DRAG_EDGE_LEFT;
private ViewDragHelper mDragHelper;
private GestureDetectorCompat mGestureDetector;
private DragStateChangeListener mDragStateChangeListener; // only used for ViewBinderHelper
private SwipeListener mSwipeListener;
interface DragStateChangeListener {
void onDragStateChanged(int state);
}
/**
* Listener for monitoring events about swipe layout.
*/
public interface SwipeListener {
/**
* Called when the main view becomes completely closed.
*/
void onClosed(SwipeRevealLayout view);
/**
* Called when the main view becomes completely opened.
*/
void onOpened(SwipeRevealLayout view);
/**
* Called when the main view's position changes.
* @param slideOffset The new offset of the main view within its range, from 0-1
*/
void onSlide(SwipeRevealLayout view, float slideOffset);
}
/**
* No-op stub for {@link SwipeListener}. If you only want to implement a subset
* of the listener methods, you can extend this instead of implementing the full interface.
*/
public static class SimpleSwipeListener implements SwipeListener {
@Override
public void onClosed(SwipeRevealLayout view) {}
@Override
public void onOpened(SwipeRevealLayout view) {}
@Override
public void onSlide(SwipeRevealLayout view, float slideOffset) {}
}
public SwipeRevealLayout(Context context) {
super(context);
init(context, null);
}
public SwipeRevealLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public SwipeRevealLayout(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
mGestureDetector.onTouchEvent(event);
mDragHelper.processTouchEvent(event);
return true;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
mDragHelper.processTouchEvent(ev);
mGestureDetector.onTouchEvent(ev);
boolean settling = mDragHelper.getViewDragState() == ViewDragHelper.STATE_SETTLING;
boolean idleAfterScrolled = mDragHelper.getViewDragState() == ViewDragHelper.STATE_IDLE
&& mIsScrolling;
return settling || idleAfterScrolled;
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
// get views
if (getChildCount() >= 2) {
mSecondaryView = getChildAt(0);
mMainView = getChildAt(1);
}
else if (getChildCount() == 1) {
mMainView = getChildAt(0);
}
}
/**
* {@inheritDoc}
*/
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
mAborted = false;
for (int index = 0; index < getChildCount(); index++) {
final View child = getChildAt(index);
int left, right, top, bottom;
left = right = top = bottom = 0;
final int minLeft = getPaddingLeft();
final int maxRight = Math.max(r - getPaddingRight() - l, 0);
final int minTop = getPaddingTop();
final int maxBottom = Math.max(b - getPaddingBottom() - t, 0);
switch (mDragEdge) {
case DRAG_EDGE_RIGHT:
left = Math.max(r - child.getMeasuredWidth() - getPaddingRight() - l, minLeft);
top = Math.min(getPaddingTop(), maxBottom);
right = Math.max(r - getPaddingRight() - l, minLeft);
bottom = Math.min(child.getMeasuredHeight() + getPaddingTop(), maxBottom);
break;
case DRAG_EDGE_LEFT:
left = Math.min(getPaddingLeft(), maxRight);
top = Math.min(getPaddingTop(), maxBottom);
right = Math.min(child.getMeasuredWidth() + getPaddingLeft(), maxRight);
bottom = Math.min(child.getMeasuredHeight() + getPaddingTop(), maxBottom);
break;
case DRAG_EDGE_TOP:
left = Math.min(getPaddingLeft(), maxRight);
top = Math.min(getPaddingTop(), maxBottom);
right = Math.min(child.getMeasuredWidth() + getPaddingLeft(), maxRight);
bottom = Math.min(child.getMeasuredHeight() + getPaddingTop(), maxBottom);
break;
case DRAG_EDGE_BOTTOM:
left = Math.min(getPaddingLeft(), maxRight);
top = Math.max(b - child.getMeasuredHeight() - getPaddingBottom() - t, minTop);
right = Math.min(child.getMeasuredWidth() + getPaddingLeft(), maxRight);
bottom = Math.max(b - getPaddingBottom() - t, minTop);
break;
}
child.layout(left, top, right, bottom);
}
// take the secondary view's offset into account when mode is SAME_LEVEL
if (mMode == MODE_SAME_LEVEL) {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
mSecondaryView.offsetLeftAndRight(-mSecondaryView.getWidth());
break;
case DRAG_EDGE_RIGHT:
mSecondaryView.offsetLeftAndRight(mSecondaryView.getWidth());
break;
case DRAG_EDGE_TOP:
mSecondaryView.offsetTopAndBottom(-mSecondaryView.getHeight());
break;
case DRAG_EDGE_BOTTOM:
mSecondaryView.offsetTopAndBottom(mSecondaryView.getHeight());
}
}
initRects();
if (mIsOpenBeforeInit) {
open(false);
} else {
close(false);
}
mLastMainLeft = mMainView.getLeft();
mLastMainTop = mMainView.getTop();
}
/**
* {@inheritDoc}
*/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (getChildCount() < 2) {
throw new RuntimeException("Layout must have two children");
}
final LayoutParams params = getLayoutParams();
final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
final int measuredWidth = MeasureSpec.getSize(widthMeasureSpec);
final int measuredHeight = MeasureSpec.getSize(heightMeasureSpec);
int desiredWidth = 0;
int desiredHeight = 0;
for (int i = 0; i < getChildCount(); i++) {
final View child = getChildAt(i);
final LayoutParams childParams = child.getLayoutParams();
if (childParams != null) {
if (childParams.height == LayoutParams.MATCH_PARENT) {
child.setMinimumHeight(measuredHeight);
}
if (childParams.width == LayoutParams.MATCH_PARENT) {
child.setMinimumWidth(measuredWidth);
}
}
measureChild(child, widthMeasureSpec, heightMeasureSpec);
desiredWidth = Math.max(child.getMeasuredWidth(), desiredWidth);
desiredHeight = Math.max(child.getMeasuredHeight(), desiredHeight);
}
// take padding into account
desiredWidth += getPaddingLeft() + getPaddingRight();
desiredHeight += getPaddingTop() + getPaddingBottom();
// adjust desired width
if (widthMode == MeasureSpec.EXACTLY) {
desiredWidth = measuredWidth;
} else {
if (params.width == LayoutParams.MATCH_PARENT) {
desiredWidth = measuredWidth;
}
if (widthMode == MeasureSpec.AT_MOST) {
desiredWidth = (desiredWidth > measuredWidth)? measuredWidth : desiredWidth;
}
}
// adjust desired height
if (heightMode == MeasureSpec.EXACTLY) {
desiredHeight = measuredHeight;
} else {
if (params.height == LayoutParams.MATCH_PARENT) {
desiredHeight = measuredHeight;
}
if (heightMode == MeasureSpec.AT_MOST) {
desiredHeight = (desiredHeight > measuredHeight)? measuredHeight : desiredHeight;
}
}
setMeasuredDimension(desiredWidth, desiredHeight);
}
@Override
public void computeScroll() {
if (mDragHelper.continueSettling(true)) {
ViewCompat.postInvalidateOnAnimation(this);
}
}
/**
* Open the panel to show the secondary view
* @param animation true to animate the open motion. {@link SwipeListener} won't be
* called if animation is false.
*/
public void open(boolean animation) {
mIsOpenBeforeInit = true;
mAborted = false;
if (animation) {
mState = STATE_OPENING;
mDragHelper.smoothSlideViewTo(mMainView, mRectMainOpen.left, mRectMainOpen.top);
if (mDragStateChangeListener != null) {
mDragStateChangeListener.onDragStateChanged(mState);
}
} else {
mState = STATE_OPEN;
mDragHelper.abort();
mMainView.layout(
mRectMainOpen.left,
mRectMainOpen.top,
mRectMainOpen.right,
mRectMainOpen.bottom
);
mSecondaryView.layout(
mRectSecOpen.left,
mRectSecOpen.top,
mRectSecOpen.right,
mRectSecOpen.bottom
);
}
ViewCompat.postInvalidateOnAnimation(SwipeRevealLayout.this);
}
/**
* Close the panel to hide the secondary view
* @param animation true to animate the close motion. {@link SwipeListener} won't be
* called if animation is false.
*/
public void close(boolean animation) {
mIsOpenBeforeInit = false;
mAborted = false;
if (animation) {
mState = STATE_CLOSING;
mDragHelper.smoothSlideViewTo(mMainView, mRectMainClose.left, mRectMainClose.top);
if (mDragStateChangeListener != null) {
mDragStateChangeListener.onDragStateChanged(mState);
}
} else {
mState = STATE_CLOSE;
mDragHelper.abort();
mMainView.layout(
mRectMainClose.left,
mRectMainClose.top,
mRectMainClose.right,
mRectMainClose.bottom
);
mSecondaryView.layout(
mRectSecClose.left,
mRectSecClose.top,
mRectSecClose.right,
mRectSecClose.bottom
);
}
ViewCompat.postInvalidateOnAnimation(SwipeRevealLayout.this);
}
/**
* Set the minimum fling velocity to cause the layout to open/close.
* @param velocity dp per second
*/
public void setMinFlingVelocity(int velocity) {
mMinFlingVelocity = velocity;
}
/**
* Get the minimum fling velocity to cause the layout to open/close.
* @return dp per second
*/
public int getMinFlingVelocity() {
return mMinFlingVelocity;
}
/**
* Set the edge where the layout can be dragged from.
* @param dragEdge Can be one of these
* <ul>
* <li>{@link #DRAG_EDGE_LEFT}</li>
* <li>{@link #DRAG_EDGE_TOP}</li>
* <li>{@link #DRAG_EDGE_RIGHT}</li>
* <li>{@link #DRAG_EDGE_BOTTOM}</li>
* </ul>
*/
public void setDragEdge(int dragEdge) {
mDragEdge = dragEdge;
}
/**
* Get the edge where the layout can be dragged from.
* @return Can be one of these
* <ul>
* <li>{@link #DRAG_EDGE_LEFT}</li>
* <li>{@link #DRAG_EDGE_TOP}</li>
* <li>{@link #DRAG_EDGE_RIGHT}</li>
* <li>{@link #DRAG_EDGE_BOTTOM}</li>
* </ul>
*/
public int getDragEdge() {
return mDragEdge;
}
public void setSwipeListener(SwipeListener listener) {
mSwipeListener = listener;
}
/**
* @param lock if set to true, the user cannot drag/swipe the layout.
*/
public void setLockDrag(boolean lock) {
mLockDrag = lock;
}
/**
* @return true if the drag/swipe motion is currently locked.
*/
public boolean isDragLocked() {
return mLockDrag;
}
/**
* @return true if layout is fully opened, false otherwise.
*/
public boolean isOpened() {
return (mState == STATE_OPEN);
}
/**
* @return true if layout is fully closed, false otherwise.
*/
public boolean isClosed() {
return (mState == STATE_CLOSE);
}
/** Only used for {@link ViewBinderHelper} */
void setDragStateChangeListener(DragStateChangeListener listener) {
mDragStateChangeListener = listener;
}
/** Abort current motion in progress. Only used for {@link ViewBinderHelper} */
protected void abort() {
mAborted = true;
mDragHelper.abort();
}
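/** Left coordinate of the main view when the layout is fully opened. */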
private int getMainOpenLeft() {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
return mRectMainClose.left + mSecondaryView.getWidth();
case DRAG_EDGE_RIGHT:
return mRectMainClose.left - mSecondaryView.getWidth();
case DRAG_EDGE_TOP:
return mRectMainClose.left;
case DRAG_EDGE_BOTTOM:
return mRectMainClose.left;
default:
return 0;
}
}
private int getMainOpenTop() {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
return mRectMainClose.top;
case DRAG_EDGE_RIGHT:
return mRectMainClose.top;
case DRAG_EDGE_TOP:
return mRectMainClose.top + mSecondaryView.getHeight();
case DRAG_EDGE_BOTTOM:
return mRectMainClose.top - mSecondaryView.getHeight();
default:
return 0;
}
}
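/** Left coordinate of the secondary view when fully opened; it only differs from
* the closed position in MODE_SAME_LEVEL with a left or right drag edge. */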
private int getSecOpenLeft() {
if (mMode == MODE_NORMAL || mDragEdge == DRAG_EDGE_BOTTOM || mDragEdge == DRAG_EDGE_TOP) {
return mRectSecClose.left;
}
if (mDragEdge == DRAG_EDGE_LEFT) {
return mRectSecClose.left + mSecondaryView.getWidth();
} else {
return mRectSecClose.left - mSecondaryView.getWidth();
}
}
private int getSecOpenTop() {
if (mMode == MODE_NORMAL || mDragEdge == DRAG_EDGE_LEFT || mDragEdge == DRAG_EDGE_RIGHT) {
return mRectSecClose.top;
}
if (mDragEdge == DRAG_EDGE_TOP) {
return mRectSecClose.top + mSecondaryView.getHeight();
} else {
return mRectSecClose.top - mSecondaryView.getHeight();
}
}
private void initRects() {
// close position of main view
mRectMainClose.set(
mMainView.getLeft(),
mMainView.getTop(),
mMainView.getRight(),
mMainView.getBottom()
);
// close position of secondary view
mRectSecClose.set(
mSecondaryView.getLeft(),
mSecondaryView.getTop(),
mSecondaryView.getRight(),
mSecondaryView.getBottom()
);
// open position of the main view
mRectMainOpen.set(
getMainOpenLeft(),
getMainOpenTop(),
getMainOpenLeft() + mMainView.getWidth(),
getMainOpenTop() + mMainView.getHeight()
);
// open position of the secondary view
mRectSecOpen.set(
getSecOpenLeft(),
getSecOpenTop(),
getSecOpenLeft() + mSecondaryView.getWidth(),
getSecOpenTop() + mSecondaryView.getHeight()
);
}
private void init(Context context, AttributeSet attrs) {
if (attrs != null && context != null) {
TypedArray a = context.getTheme().obtainStyledAttributes(
attrs,
R.styleable.SwipeRevealLayout,
0, 0
);
mDragEdge = a.getInteger(R.styleable.SwipeRevealLayout_dragEdge, DRAG_EDGE_LEFT);
mMinFlingVelocity = a.getInteger(R.styleable.SwipeRevealLayout_flingVelocity, DEFAULT_MIN_FLING_VELOCITY);
mMode = a.getInteger(R.styleable.SwipeRevealLayout_mode, MODE_NORMAL);
}
mDragHelper = ViewDragHelper.create(this, 1.0f, mDragHelperCallback);
mDragHelper.setEdgeTrackingEnabled(ViewDragHelper.EDGE_ALL);
mGestureDetector = new GestureDetectorCompat(context, mGestureListener);
}
private final GestureDetector.OnGestureListener mGestureListener = new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDown(MotionEvent e) {
mIsScrolling = false;
return true;
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
mIsScrolling = true;
return false;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
mIsScrolling = true;
if (getParent() != null) {
getParent().requestDisallowInterceptTouchEvent(true);
}
return false;
}
};
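/** Horizontal halfway point of the secondary view, used on release to decide
* whether to open or close the layout. */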
private int getHalfwayPivotHorizontal() {
if (mDragEdge == DRAG_EDGE_LEFT) {
return mRectMainClose.left + mSecondaryView.getWidth() / 2;
} else {
return mRectMainClose.right - mSecondaryView.getWidth() / 2;
}
}
private int getHalfwayPivotVertical() {
if (mDragEdge == DRAG_EDGE_TOP) {
return mRectMainClose.top + mSecondaryView.getHeight() / 2;
} else {
return mRectMainClose.bottom - mSecondaryView.getHeight() / 2;
}
}
private final ViewDragHelper.Callback mDragHelperCallback = new ViewDragHelper.Callback() {
@Override
public boolean tryCaptureView(View child, int pointerId) {
mAborted = false;
if (mLockDrag)
return false;
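// always capture the main view itself, no matter which child received the touch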
mDragHelper.captureChildView(mMainView, pointerId);
return false;
}
@Override
public int clampViewPositionVertical(View child, int top, int dy) {
switch (mDragEdge) {
case DRAG_EDGE_TOP:
return Math.max(
Math.min(top, mRectMainClose.top + mSecondaryView.getHeight()),
mRectMainClose.top
);
case DRAG_EDGE_BOTTOM:
return Math.max(
Math.min(top, mRectMainClose.top),
mRectMainClose.top - mSecondaryView.getHeight()
);
default:
return child.getTop();
}
}
@Override
public int clampViewPositionHorizontal(View child, int left, int dx) {
switch (mDragEdge) {
case DRAG_EDGE_RIGHT:
return Math.max(
Math.min(left, mRectMainClose.left),
mRectMainClose.left - mSecondaryView.getWidth()
);
case DRAG_EDGE_LEFT:
return Math.max(
Math.min(left, mRectMainClose.left + mSecondaryView.getWidth()),
mRectMainClose.left
);
default:
return child.getLeft();
}
}
@Override
public void onViewReleased(View releasedChild, float xvel, float yvel) {
final boolean velRightExceeded = pxToDp((int) xvel) >= mMinFlingVelocity;
final boolean velLeftExceeded = pxToDp((int) xvel) <= -mMinFlingVelocity;
final boolean velUpExceeded = pxToDp((int) yvel) <= -mMinFlingVelocity;
final boolean velDownExceeded = pxToDp((int) yvel) >= mMinFlingVelocity;
final int pivotHorizontal = getHalfwayPivotHorizontal();
final int pivotVertical = getHalfwayPivotVertical();
switch (mDragEdge) {
case DRAG_EDGE_RIGHT:
if (velRightExceeded) {
close(true);
} else if (velLeftExceeded) {
open(true);
} else {
if (mMainView.getRight() < pivotHorizontal) {
open(true);
} else {
close(true);
}
}
break;
case DRAG_EDGE_LEFT:
if (velRightExceeded) {
open(true);
} else if (velLeftExceeded) {
close(true);
} else {
if (mMainView.getLeft() < pivotHorizontal) {
close(true);
} else {
open(true);
}
}
break;
case DRAG_EDGE_TOP:
if (velUpExceeded) {
close(true);
} else if (velDownExceeded) {
open(true);
} else {
if (mMainView.getTop() < pivotVertical) {
close(true);
} else {
open(true);
}
}
break;
case DRAG_EDGE_BOTTOM:
if (velUpExceeded) {
open(true);
} else if (velDownExceeded) {
close(true);
} else {
if (mMainView.getBottom() < pivotVertical) {
open(true);
} else {
close(true);
}
}
break;
}
}
@Override
public void onEdgeDragStarted(int edgeFlags, int pointerId) {
super.onEdgeDragStarted(edgeFlags, pointerId);
if (mLockDrag) {
return;
}
boolean edgeStartLeft = (mDragEdge == DRAG_EDGE_RIGHT)
&& edgeFlags == ViewDragHelper.EDGE_LEFT;
boolean edgeStartRight = (mDragEdge == DRAG_EDGE_LEFT)
&& edgeFlags == ViewDragHelper.EDGE_RIGHT;
boolean edgeStartTop = (mDragEdge == DRAG_EDGE_BOTTOM)
&& edgeFlags == ViewDragHelper.EDGE_TOP;
boolean edgeStartBottom = (mDragEdge == DRAG_EDGE_TOP)
&& edgeFlags == ViewDragHelper.EDGE_BOTTOM;
if (edgeStartLeft || edgeStartRight || edgeStartTop || edgeStartBottom) {
mDragHelper.captureChildView(mMainView, pointerId);
}
}
@Override
public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
super.onViewPositionChanged(changedView, left, top, dx, dy);
if (mMode == MODE_SAME_LEVEL) {
if (mDragEdge == DRAG_EDGE_LEFT || mDragEdge == DRAG_EDGE_RIGHT) {
mSecondaryView.offsetLeftAndRight(dx);
} else {
mSecondaryView.offsetTopAndBottom(dy);
}
}
boolean isMoved = (mMainView.getLeft() != mLastMainLeft) || (mMainView.getTop() != mLastMainTop);
if (mSwipeListener != null && isMoved) {
if (mMainView.getLeft() == mRectMainClose.left && mMainView.getTop() == mRectMainClose.top) {
mSwipeListener.onClosed(SwipeRevealLayout.this);
}
else if (mMainView.getLeft() == mRectMainOpen.left && mMainView.getTop() == mRectMainOpen.top) {
mSwipeListener.onOpened(SwipeRevealLayout.this);
}
else {
mSwipeListener.onSlide(SwipeRevealLayout.this, getSlideOffset());
}
}
mLastMainLeft = mMainView.getLeft();
mLastMainTop = mMainView.getTop();
ViewCompat.postInvalidateOnAnimation(SwipeRevealLayout.this);
}
private float getSlideOffset() {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
return (float) (mMainView.getLeft() - mRectMainClose.left) / mSecondaryView.getWidth();
case DRAG_EDGE_RIGHT:
return (float) (mRectMainClose.left - mMainView.getLeft()) / mSecondaryView.getWidth();
case DRAG_EDGE_TOP:
return (float) (mMainView.getTop() - mRectMainClose.top) / mSecondaryView.getHeight();
case DRAG_EDGE_BOTTOM:
return (float) (mRectMainClose.top - mMainView.getTop()) / mSecondaryView.getHeight();
default:
return 0;
}
}
@Override
public void onViewDragStateChanged(int state) {
super.onViewDragStateChanged(state);
final int prevState = mState;
switch (state) {
case ViewDragHelper.STATE_DRAGGING:
mState = STATE_DRAGGING;
break;
case ViewDragHelper.STATE_IDLE:
// drag edge is left or right
if (mDragEdge == DRAG_EDGE_LEFT || mDragEdge == DRAG_EDGE_RIGHT) {
if (mMainView.getLeft() == mRectMainClose.left) {
mState = STATE_CLOSE;
} else {
mState = STATE_OPEN;
}
}
// drag edge is top or bottom
else {
if (mMainView.getTop() == mRectMainClose.top) {
mState = STATE_CLOSE;
} else {
mState = STATE_OPEN;
}
}
break;
}
if (mDragStateChangeListener != null && !mAborted && prevState != mState) {
mDragStateChangeListener.onDragStateChanged(mState);
}
}
};
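/** Returns a readable name for one of the STATE_* constants, mainly for debugging. */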
public static String getStateString(int state) {
switch (state) {
case STATE_CLOSE:
return "state_close";
case STATE_CLOSING:
return "state_closing";
case STATE_OPEN:
return "state_open";
case STATE_OPENING:
return "state_opening";
case STATE_DRAGGING:
return "state_dragging";
default:
return "undefined";
}
}
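/** Converts a pixel value into density-independent pixels using the current display metrics. */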
private int pxToDp(int px) {
Resources resources = getContext().getResources();
DisplayMetrics metrics = resources.getDisplayMetrics();
return (int) (px / ((float)metrics.densityDpi / DisplayMetrics.DENSITY_DEFAULT));
}
}
| swipe-reveal-layout/src/main/java/com/chauthai/swipereveallayout/SwipeRevealLayout.java | /**
The MIT License (MIT)
Copyright (c) 2016 Chau Thai
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package com.chauthai.swipereveallayout;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Rect;
import android.support.v4.view.GestureDetectorCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
@SuppressLint("RtlHardcoded")
public class SwipeRevealLayout extends ViewGroup {
// These states are used only for ViewBinderHelper
protected static final int STATE_CLOSE = 0;
protected static final int STATE_CLOSING = 1;
protected static final int STATE_OPEN = 2;
protected static final int STATE_OPENING = 3;
protected static final int STATE_DRAGGING = 4;
private static final int DEFAULT_MIN_FLING_VELOCITY = 300; // dp per second
public static final int DRAG_EDGE_LEFT = 0x1;
public static final int DRAG_EDGE_RIGHT = 0x1 << 1;
public static final int DRAG_EDGE_TOP = 0x1 << 2;
public static final int DRAG_EDGE_BOTTOM = 0x1 << 3;
/**
* The secondary view will be under the main view.
*/
public static final int MODE_NORMAL = 0;
/**
* The secondary view will stick to the edge of the main view.
*/
public static final int MODE_SAME_LEVEL = 1;
/**
* Main view is the view which is shown when the layout is closed.
*/
private View mMainView;
/**
* Secondary view is the view which is shown when the layout is opened.
*/
private View mSecondaryView;
/**
* The rectangle position of the main view when the layout is closed.
*/
private Rect mRectMainClose = new Rect();
/**
* The rectangle position of the main view when the layout is opened.
*/
private Rect mRectMainOpen = new Rect();
/**
* The rectangle position of the secondary view when the layout is closed.
*/
private Rect mRectSecClose = new Rect();
/**
* The rectangle position of the secondary view when the layout is opened.
*/
private Rect mRectSecOpen = new Rect();
private boolean mIsOpenBeforeInit = false;
private volatile boolean mAborted = false;
private volatile boolean mIsScrolling = false;
private volatile boolean mLockDrag = false;
private int mMinFlingVelocity = DEFAULT_MIN_FLING_VELOCITY;
private int mState = STATE_CLOSE;
private int mMode = MODE_NORMAL;
private int mLastMainLeft = 0;
private int mLastMainTop = 0;
private int mDragEdge = DRAG_EDGE_LEFT;
private ViewDragHelper mDragHelper;
private GestureDetectorCompat mGestureDetector;
private DragStateChangeListener mDragStateChangeListener; // only used for ViewBinderHelper
private SwipeListener mSwipeListener;
interface DragStateChangeListener {
void onDragStateChanged(int state);
}
/**
* Listener for monitoring events about swipe layout.
*/
public interface SwipeListener {
/**
* Called when the main view becomes completely closed.
*/
void onClosed(SwipeRevealLayout view);
/**
* Called when the main view becomes completely opened.
*/
void onOpened(SwipeRevealLayout view);
/**
* Called when the main view's position changes.
* @param slideOffset The new offset of the main view within its range, from 0-1
*/
void onSlide(SwipeRevealLayout view, float slideOffset);
}
/**
     * No-op stub for {@link SwipeListener}. If you only want to implement a subset
     * of the listener methods, you can extend this instead of implementing the full interface.
*/
public static class SimpleSwipeListener implements SwipeListener {
@Override
public void onClosed(SwipeRevealLayout view) {}
@Override
public void onOpened(SwipeRevealLayout view) {}
@Override
public void onSlide(SwipeRevealLayout view, float slideOffset) {}
}
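    // Editor's sketch (hypothetical, not part of the original source): a caller usually
    // extends SimpleSwipeListener and overrides only the callbacks it cares about, e.g.
    //
    //     layout.setSwipeListener(new SimpleSwipeListener() {
    //         @Override
    //         public void onSlide(SwipeRevealLayout view, float slideOffset) {
    //             secondaryRow.setAlpha(slideOffset); // "secondaryRow" is an illustrative view reference
    //         }
    //     });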
public SwipeRevealLayout(Context context) {
super(context);
init(context, null);
}
public SwipeRevealLayout(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
public SwipeRevealLayout(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
mGestureDetector.onTouchEvent(event);
mDragHelper.processTouchEvent(event);
return true;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
mDragHelper.processTouchEvent(ev);
mGestureDetector.onTouchEvent(ev);
boolean settling = mDragHelper.getViewDragState() == ViewDragHelper.STATE_SETTLING;
boolean idleAfterScrolled = mDragHelper.getViewDragState() == ViewDragHelper.STATE_IDLE
&& mIsScrolling;
return settling || idleAfterScrolled;
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
// get views
if (getChildCount() >= 2) {
mSecondaryView = getChildAt(0);
mMainView = getChildAt(1);
}
else if (getChildCount() == 1) {
mMainView = getChildAt(0);
}
}
/**
* {@inheritDoc}
*/
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
mAborted = false;
for (int index = 0; index < getChildCount(); index++) {
final View child = getChildAt(index);
int left, right, top, bottom;
left = right = top = bottom = 0;
final int minLeft = getPaddingLeft();
final int maxRight = Math.max(r - getPaddingRight() - l, 0);
final int minTop = getPaddingTop();
final int maxBottom = Math.max(b - getPaddingBottom() - t, 0);
switch (mDragEdge) {
case DRAG_EDGE_RIGHT:
left = Math.max(r - child.getMeasuredWidth() - getPaddingRight() - l, minLeft);
top = Math.min(getPaddingTop(), maxBottom);
right = Math.max(r - getPaddingRight() - l, minLeft);
bottom = Math.min(child.getMeasuredHeight() + getPaddingTop(), maxBottom);
break;
case DRAG_EDGE_LEFT:
left = Math.min(getPaddingLeft(), maxRight);
top = Math.min(getPaddingTop(), maxBottom);
right = Math.min(child.getMeasuredWidth() + getPaddingLeft(), maxRight);
bottom = Math.min(child.getMeasuredHeight() + getPaddingTop(), maxBottom);
break;
case DRAG_EDGE_TOP:
left = Math.min(getPaddingLeft(), maxRight);
top = Math.min(getPaddingTop(), maxBottom);
right = Math.min(child.getMeasuredWidth() + getPaddingLeft(), maxRight);
bottom = Math.min(child.getMeasuredHeight() + getPaddingTop(), maxBottom);
break;
case DRAG_EDGE_BOTTOM:
left = Math.min(getPaddingLeft(), maxRight);
top = Math.max(b - child.getMeasuredHeight() - getPaddingBottom() - t, minTop);
right = Math.min(child.getMeasuredWidth() + getPaddingLeft(), maxRight);
bottom = Math.max(b - getPaddingBottom() - t, minTop);
break;
}
child.layout(left, top, right, bottom);
}
        // taking the offset into account when mode is SAME_LEVEL
if (mMode == MODE_SAME_LEVEL) {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
mSecondaryView.offsetLeftAndRight(-mSecondaryView.getWidth());
break;
case DRAG_EDGE_RIGHT:
mSecondaryView.offsetLeftAndRight(mSecondaryView.getWidth());
break;
case DRAG_EDGE_TOP:
mSecondaryView.offsetTopAndBottom(-mSecondaryView.getHeight());
break;
case DRAG_EDGE_BOTTOM:
mSecondaryView.offsetTopAndBottom(mSecondaryView.getHeight());
}
}
initRects();
if (mIsOpenBeforeInit) {
open(false);
} else {
close(false);
}
mLastMainLeft = mMainView.getLeft();
mLastMainTop = mMainView.getTop();
}
/**
* {@inheritDoc}
*/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (getChildCount() < 2) {
throw new RuntimeException("Layout must have two children");
}
final LayoutParams params = getLayoutParams();
final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
final int measuredWidth = MeasureSpec.getSize(widthMeasureSpec);
final int measuredHeight = MeasureSpec.getSize(heightMeasureSpec);
int desiredWidth = 0;
int desiredHeight = 0;
for (int i = 0; i < getChildCount(); i++) {
final View child = getChildAt(i);
final LayoutParams childParams = child.getLayoutParams();
if (childParams != null) {
if (childParams.height == LayoutParams.MATCH_PARENT) {
child.setMinimumHeight(measuredHeight);
}
if (childParams.width == LayoutParams.MATCH_PARENT) {
child.setMinimumWidth(measuredWidth);
}
}
measureChild(child, widthMeasureSpec, heightMeasureSpec);
desiredWidth = Math.max(child.getMeasuredWidth(), desiredWidth);
desiredHeight = Math.max(child.getMeasuredHeight(), desiredHeight);
}
        // taking account of padding
desiredWidth += getPaddingLeft() + getPaddingRight();
desiredHeight += getPaddingTop() + getPaddingBottom();
// adjust desired width
if (widthMode == MeasureSpec.EXACTLY) {
desiredWidth = measuredWidth;
} else {
if (params.width == LayoutParams.MATCH_PARENT) {
desiredWidth = measuredWidth;
}
if (widthMode == MeasureSpec.AT_MOST) {
desiredWidth = (desiredWidth > measuredWidth)? measuredWidth : desiredWidth;
}
}
// adjust desired height
if (heightMode == MeasureSpec.EXACTLY) {
desiredHeight = measuredHeight;
} else {
if (params.height == LayoutParams.MATCH_PARENT) {
desiredHeight = measuredHeight;
}
if (heightMode == MeasureSpec.AT_MOST) {
desiredHeight = (desiredHeight > measuredHeight)? measuredHeight : desiredHeight;
}
}
setMeasuredDimension(desiredWidth, desiredHeight);
}
@Override
public void computeScroll() {
if (mDragHelper.continueSettling(true)) {
ViewCompat.postInvalidateOnAnimation(this);
}
}
/**
* Open the panel to show the secondary view
     * @param animation true to animate the open motion. {@link SwipeListener} won't be
     *                  called if animation is false.
*/
public void open(boolean animation) {
mIsOpenBeforeInit = true;
mAborted = false;
if (animation) {
mState = STATE_OPENING;
mDragHelper.smoothSlideViewTo(mMainView, mRectMainOpen.left, mRectMainOpen.top);
if (mDragStateChangeListener != null) {
mDragStateChangeListener.onDragStateChanged(mState);
}
} else {
mState = STATE_OPEN;
mDragHelper.abort();
mMainView.layout(
mRectMainOpen.left,
mRectMainOpen.top,
mRectMainOpen.right,
mRectMainOpen.bottom
);
mSecondaryView.layout(
mRectSecOpen.left,
mRectSecOpen.top,
mRectSecOpen.right,
mRectSecOpen.bottom
);
}
ViewCompat.postInvalidateOnAnimation(SwipeRevealLayout.this);
}
/**
* Close the panel to hide the secondary view
     * @param animation true to animate the close motion. {@link SwipeListener} won't be
     *                  called if animation is false.
*/
public void close(boolean animation) {
mIsOpenBeforeInit = false;
mAborted = false;
if (animation) {
mState = STATE_CLOSING;
mDragHelper.smoothSlideViewTo(mMainView, mRectMainClose.left, mRectMainClose.top);
if (mDragStateChangeListener != null) {
mDragStateChangeListener.onDragStateChanged(mState);
}
} else {
mState = STATE_CLOSE;
mDragHelper.abort();
mMainView.layout(
mRectMainClose.left,
mRectMainClose.top,
mRectMainClose.right,
mRectMainClose.bottom
);
mSecondaryView.layout(
mRectSecClose.left,
mRectSecClose.top,
mRectSecClose.right,
mRectSecClose.bottom
);
}
ViewCompat.postInvalidateOnAnimation(SwipeRevealLayout.this);
}
/**
* Set the minimum fling velocity to cause the layout to open/close.
* @param velocity dp per second
*/
public void setMinFlingVelocity(int velocity) {
mMinFlingVelocity = velocity;
}
/**
* Get the minimum fling velocity to cause the layout to open/close.
* @return dp per second
*/
public int getMinFlingVelocity() {
return mMinFlingVelocity;
}
/**
* Set the edge where the layout can be dragged from.
* @param dragEdge Can be one of these
* <ul>
* <li>{@link #DRAG_EDGE_LEFT}</li>
* <li>{@link #DRAG_EDGE_TOP}</li>
* <li>{@link #DRAG_EDGE_RIGHT}</li>
* <li>{@link #DRAG_EDGE_BOTTOM}</li>
* </ul>
*/
public void setDragEdge(int dragEdge) {
mDragEdge = dragEdge;
}
/**
* Get the edge where the layout can be dragged from.
* @return Can be one of these
* <ul>
* <li>{@link #DRAG_EDGE_LEFT}</li>
* <li>{@link #DRAG_EDGE_TOP}</li>
* <li>{@link #DRAG_EDGE_RIGHT}</li>
* <li>{@link #DRAG_EDGE_BOTTOM}</li>
* </ul>
*/
public int getDragEdge() {
return mDragEdge;
}
public void setSwipeListener(SwipeListener listener) {
mSwipeListener = listener;
}
/**
* @param lock if set to true, the user cannot drag/swipe the layout.
*/
public void setLockDrag(boolean lock) {
mLockDrag = lock;
}
/**
* @return true if the drag/swipe motion is currently locked.
*/
public boolean isDragLocked() {
return mLockDrag;
}
/**
* @return true if the state is opened, false otherwise.
*/
public boolean isOpened() {
return (mState == STATE_OPEN);
}
/** Only used for {@link ViewBinderHelper} */
void setDragStateChangeListener(DragStateChangeListener listener) {
mDragStateChangeListener = listener;
}
/** Abort current motion in progress. Only used for {@link ViewBinderHelper} */
protected void abort() {
mAborted = true;
mDragHelper.abort();
}
private int getMainOpenLeft() {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
return mRectMainClose.left + mSecondaryView.getWidth();
case DRAG_EDGE_RIGHT:
return mRectMainClose.left - mSecondaryView.getWidth();
case DRAG_EDGE_TOP:
return mRectMainClose.left;
case DRAG_EDGE_BOTTOM:
return mRectMainClose.left;
default:
return 0;
}
}
private int getMainOpenTop() {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
return mRectMainClose.top;
case DRAG_EDGE_RIGHT:
return mRectMainClose.top;
case DRAG_EDGE_TOP:
return mRectMainClose.top + mSecondaryView.getHeight();
case DRAG_EDGE_BOTTOM:
return mRectMainClose.top - mSecondaryView.getHeight();
default:
return 0;
}
}
private int getSecOpenLeft() {
if (mMode == MODE_NORMAL || mDragEdge == DRAG_EDGE_BOTTOM || mDragEdge == DRAG_EDGE_TOP) {
return mRectSecClose.left;
}
if (mDragEdge == DRAG_EDGE_LEFT) {
return mRectSecClose.left + mSecondaryView.getWidth();
} else {
return mRectSecClose.left - mSecondaryView.getWidth();
}
}
private int getSecOpenTop() {
if (mMode == MODE_NORMAL || mDragEdge == DRAG_EDGE_LEFT || mDragEdge == DRAG_EDGE_RIGHT) {
return mRectSecClose.top;
}
if (mDragEdge == DRAG_EDGE_TOP) {
return mRectSecClose.top + mSecondaryView.getHeight();
} else {
return mRectSecClose.top - mSecondaryView.getHeight();
}
}
private void initRects() {
// close position of main view
mRectMainClose.set(
mMainView.getLeft(),
mMainView.getTop(),
mMainView.getRight(),
mMainView.getBottom()
);
// close position of secondary view
mRectSecClose.set(
mSecondaryView.getLeft(),
mSecondaryView.getTop(),
mSecondaryView.getRight(),
mSecondaryView.getBottom()
);
// open position of the main view
mRectMainOpen.set(
getMainOpenLeft(),
getMainOpenTop(),
getMainOpenLeft() + mMainView.getWidth(),
getMainOpenTop() + mMainView.getHeight()
);
// open position of the secondary view
mRectSecOpen.set(
getSecOpenLeft(),
getSecOpenTop(),
getSecOpenLeft() + mSecondaryView.getWidth(),
getSecOpenTop() + mSecondaryView.getHeight()
);
}
private void init(Context context, AttributeSet attrs) {
if (attrs != null && context != null) {
TypedArray a = context.getTheme().obtainStyledAttributes(
attrs,
R.styleable.SwipeRevealLayout,
0, 0
);
mDragEdge = a.getInteger(R.styleable.SwipeRevealLayout_dragEdge, DRAG_EDGE_LEFT);
mMinFlingVelocity = a.getInteger(R.styleable.SwipeRevealLayout_flingVelocity, DEFAULT_MIN_FLING_VELOCITY);
mMode = a.getInteger(R.styleable.SwipeRevealLayout_mode, MODE_NORMAL);
}
mDragHelper = ViewDragHelper.create(this, 1.0f, mDragHelperCallback);
mDragHelper.setEdgeTrackingEnabled(ViewDragHelper.EDGE_ALL);
mGestureDetector = new GestureDetectorCompat(context, mGestureListener);
}
private final GestureDetector.OnGestureListener mGestureListener = new GestureDetector.SimpleOnGestureListener() {
@Override
public boolean onDown(MotionEvent e) {
mIsScrolling = false;
return true;
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
mIsScrolling = true;
return false;
}
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
mIsScrolling = true;
if (getParent() != null) {
getParent().requestDisallowInterceptTouchEvent(true);
}
return false;
}
};
private int getHalfwayPivotHorizontal() {
if (mDragEdge == DRAG_EDGE_LEFT) {
return mRectMainClose.left + mSecondaryView.getWidth() / 2;
} else {
return mRectMainClose.right - mSecondaryView.getWidth() / 2;
}
}
private int getHalfwayPivotVertical() {
if (mDragEdge == DRAG_EDGE_TOP) {
return mRectMainClose.top + mSecondaryView.getHeight() / 2;
} else {
return mRectMainClose.bottom - mSecondaryView.getHeight() / 2;
}
}
private final ViewDragHelper.Callback mDragHelperCallback = new ViewDragHelper.Callback() {
@Override
public boolean tryCaptureView(View child, int pointerId) {
mAborted = false;
if (mLockDrag)
return false;
mDragHelper.captureChildView(mMainView, pointerId);
return false;
}
@Override
public int clampViewPositionVertical(View child, int top, int dy) {
switch (mDragEdge) {
case DRAG_EDGE_TOP:
return Math.max(
Math.min(top, mRectMainClose.top + mSecondaryView.getHeight()),
mRectMainClose.top
);
case DRAG_EDGE_BOTTOM:
return Math.max(
Math.min(top, mRectMainClose.top),
mRectMainClose.top - mSecondaryView.getHeight()
);
default:
return child.getTop();
}
}
@Override
public int clampViewPositionHorizontal(View child, int left, int dx) {
switch (mDragEdge) {
case DRAG_EDGE_RIGHT:
return Math.max(
Math.min(left, mRectMainClose.left),
mRectMainClose.left - mSecondaryView.getWidth()
);
case DRAG_EDGE_LEFT:
return Math.max(
Math.min(left, mRectMainClose.left + mSecondaryView.getWidth()),
mRectMainClose.left
);
default:
return child.getLeft();
}
}
@Override
public void onViewReleased(View releasedChild, float xvel, float yvel) {
final boolean velRightExceeded = pxToDp((int) xvel) >= mMinFlingVelocity;
final boolean velLeftExceeded = pxToDp((int) xvel) <= -mMinFlingVelocity;
final boolean velUpExceeded = pxToDp((int) yvel) <= -mMinFlingVelocity;
final boolean velDownExceeded = pxToDp((int) yvel) >= mMinFlingVelocity;
final int pivotHorizontal = getHalfwayPivotHorizontal();
final int pivotVertical = getHalfwayPivotVertical();
switch (mDragEdge) {
case DRAG_EDGE_RIGHT:
if (velRightExceeded) {
close(true);
} else if (velLeftExceeded) {
open(true);
} else {
if (mMainView.getRight() < pivotHorizontal) {
open(true);
} else {
close(true);
}
}
break;
case DRAG_EDGE_LEFT:
if (velRightExceeded) {
open(true);
} else if (velLeftExceeded) {
close(true);
} else {
if (mMainView.getLeft() < pivotHorizontal) {
close(true);
} else {
open(true);
}
}
break;
case DRAG_EDGE_TOP:
if (velUpExceeded) {
close(true);
} else if (velDownExceeded) {
open(true);
} else {
if (mMainView.getTop() < pivotVertical) {
close(true);
} else {
open(true);
}
}
break;
case DRAG_EDGE_BOTTOM:
if (velUpExceeded) {
open(true);
} else if (velDownExceeded) {
close(true);
} else {
if (mMainView.getBottom() < pivotVertical) {
open(true);
} else {
close(true);
}
}
break;
}
}
@Override
public void onEdgeDragStarted(int edgeFlags, int pointerId) {
super.onEdgeDragStarted(edgeFlags, pointerId);
if (mLockDrag) {
return;
}
boolean edgeStartLeft = (mDragEdge == DRAG_EDGE_RIGHT)
&& edgeFlags == ViewDragHelper.EDGE_LEFT;
boolean edgeStartRight = (mDragEdge == DRAG_EDGE_LEFT)
&& edgeFlags == ViewDragHelper.EDGE_RIGHT;
boolean edgeStartTop = (mDragEdge == DRAG_EDGE_BOTTOM)
&& edgeFlags == ViewDragHelper.EDGE_TOP;
boolean edgeStartBottom = (mDragEdge == DRAG_EDGE_TOP)
&& edgeFlags == ViewDragHelper.EDGE_BOTTOM;
if (edgeStartLeft || edgeStartRight || edgeStartTop || edgeStartBottom) {
mDragHelper.captureChildView(mMainView, pointerId);
}
}
@Override
public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
super.onViewPositionChanged(changedView, left, top, dx, dy);
if (mMode == MODE_SAME_LEVEL) {
if (mDragEdge == DRAG_EDGE_LEFT || mDragEdge == DRAG_EDGE_RIGHT) {
mSecondaryView.offsetLeftAndRight(dx);
} else {
mSecondaryView.offsetTopAndBottom(dy);
}
}
boolean isMoved = (mMainView.getLeft() != mLastMainLeft) || (mMainView.getTop() != mLastMainTop);
if (mSwipeListener != null && isMoved) {
if (mMainView.getLeft() == mRectMainClose.left && mMainView.getTop() == mRectMainClose.top) {
mSwipeListener.onClosed(SwipeRevealLayout.this);
}
else if (mMainView.getLeft() == mRectMainOpen.left && mMainView.getTop() == mRectMainOpen.top) {
mSwipeListener.onOpened(SwipeRevealLayout.this);
}
else {
mSwipeListener.onSlide(SwipeRevealLayout.this, getSlideOffset());
}
}
mLastMainLeft = mMainView.getLeft();
mLastMainTop = mMainView.getTop();
ViewCompat.postInvalidateOnAnimation(SwipeRevealLayout.this);
}
private float getSlideOffset() {
switch (mDragEdge) {
case DRAG_EDGE_LEFT:
return (float) (mMainView.getLeft() - mRectMainClose.left) / mSecondaryView.getWidth();
case DRAG_EDGE_RIGHT:
return (float) (mRectMainClose.left - mMainView.getLeft()) / mSecondaryView.getWidth();
case DRAG_EDGE_TOP:
return (float) (mMainView.getTop() - mRectMainClose.top) / mSecondaryView.getHeight();
case DRAG_EDGE_BOTTOM:
return (float) (mRectMainClose.top - mMainView.getTop()) / mSecondaryView.getHeight();
default:
return 0;
}
}
@Override
public void onViewDragStateChanged(int state) {
super.onViewDragStateChanged(state);
final int prevState = mState;
switch (state) {
case ViewDragHelper.STATE_DRAGGING:
mState = STATE_DRAGGING;
break;
case ViewDragHelper.STATE_IDLE:
// drag edge is left or right
if (mDragEdge == DRAG_EDGE_LEFT || mDragEdge == DRAG_EDGE_RIGHT) {
if (mMainView.getLeft() == mRectMainClose.left) {
mState = STATE_CLOSE;
} else {
mState = STATE_OPEN;
}
}
// drag edge is top or bottom
else {
if (mMainView.getTop() == mRectMainClose.top) {
mState = STATE_CLOSE;
} else {
mState = STATE_OPEN;
}
}
break;
}
if (mDragStateChangeListener != null && !mAborted && prevState != mState) {
mDragStateChangeListener.onDragStateChanged(mState);
}
}
};
public static String getStateString(int state) {
switch (state) {
case STATE_CLOSE:
return "state_close";
case STATE_CLOSING:
return "state_closing";
case STATE_OPEN:
return "state_open";
case STATE_OPENING:
return "state_opening";
case STATE_DRAGGING:
return "state_dragging";
default:
return "undefined";
}
}
private int pxToDp(int px) {
Resources resources = getContext().getResources();
DisplayMetrics metrics = resources.getDisplayMetrics();
return (int) (px / ((float)metrics.densityDpi / DisplayMetrics.DENSITY_DEFAULT));
}
}
| Add isClosed().
Fix docs
| swipe-reveal-layout/src/main/java/com/chauthai/swipereveallayout/SwipeRevealLayout.java | Add isClosed(). Fix docs | <ide><path>wipe-reveal-layout/src/main/java/com/chauthai/swipereveallayout/SwipeRevealLayout.java
<ide> }
<ide>
<ide> /**
<del> * @return true if the state is opened, false otherwise.
<add> * @return true if layout is fully opened, false otherwise.
<ide> */
<ide> public boolean isOpened() {
<ide> return (mState == STATE_OPEN);
<add> }
<add>
<add> /**
<add> * @return true if layout is fully closed, false otherwise.
<add> */
<add> public boolean isClosed() {
<add> return (mState == STATE_CLOSE);
<ide> }
<ide>
<ide> /** Only used for {@link ViewBinderHelper} */ |
|
Java | apache-2.0 | b14396fdd8bfbc17eb3e463527cc16cba3f88f4e | 0 | takenet/lime-java,andrebires/lime-java | package org.limeprotocol.util;
import java.util.LinkedHashMap;
import java.util.Map;
public class StringUtils {
public static boolean isNullOrEmpty(String string){
return string == null || string.equals("");
}
public static boolean isNullOrWhiteSpace(String string){
return isNullOrEmpty(string) || string.trim().length() == 0;
}
public static String format(String pattern, Object... values){
Map<String, Object> tags = new LinkedHashMap<String, Object>();
for (int i=0; i<values.length; i++){
tags.put("\\{" + i + "\\}", values[i]==null ? "" : values[i]);
}
String formatted = pattern;
for (Map.Entry<String, Object> tag : tags.entrySet()) {
// bottleneck, creating temporary String objects!
formatted = formatted.replaceAll(tag.getKey(), tag.getValue().toString());
}
return formatted;
}
public static String trimEnd(String string, String finalCharacter){
string.trim();
if(string.endsWith(finalCharacter)){
return string.substring(0, string.length()-1);
}
return string;
}
}
| src/main/java/org/limeprotocol/util/StringUtils.java | package org.limeprotocol.util;
import java.util.LinkedHashMap;
import java.util.Map;
public class StringUtils {
public static boolean isNullOrEmpty(String string){
return string == null || string.equals("");
}
public static boolean isNullOrWhiteSpace(String string){
return isNullOrEmpty(string) || string.trim().length() == 0;
}
public static String format(String pattern, Object... values){
Map<String, Object> tags = new LinkedHashMap<String, Object>();
for (int i=0; i<values.length; i++){
tags.put("\\{" + i + "\\}", values[i]);
}
String formatted = pattern;
for (Map.Entry<String, Object> tag : tags.entrySet()) {
// bottleneck, creating temporary String objects!
formatted = formatted.replaceAll(tag.getKey(), tag.getValue().toString());
}
return formatted;
}
public static String trimEnd(String string, String finalCharacter){
string.trim();
if(string.endsWith(finalCharacter)){
return string.substring(0, string.length()-1);
}
return string;
}
}
| Transform null values to empty string
| src/main/java/org/limeprotocol/util/StringUtils.java | Transform null values to empty string | <ide><path>rc/main/java/org/limeprotocol/util/StringUtils.java
<ide> Map<String, Object> tags = new LinkedHashMap<String, Object>();
<ide>
<ide> for (int i=0; i<values.length; i++){
<del> tags.put("\\{" + i + "\\}", values[i]);
<add> tags.put("\\{" + i + "\\}", values[i]==null ? "" : values[i]);
<ide> }
<ide>
<ide> String formatted = pattern; |
|
JavaScript | mit | 45327d79757119b2e769bede28932d5b2df5bfc1 | 0 | heroku/node-heroku-client | 'use strict';
var http = require('http');
var https = require('https');
var concat = require('concat-stream');
var lazy = require('lazy.js');
var logfmt = require('logfmt');
var q = require('q');
var cache;
var encryptor;
module.exports = Request;
/*
* Create an object capable of making API
* calls. Accepts custom request options and
* a callback function.
*/
function Request(options, callback) {
this.options = options || {};
this.host = options.host || 'api.heroku.com';
this.log = options.log;
this.debug = options.debug;
this.callback = callback;
this.deferred = q.defer();
this.parseJSON = options.hasOwnProperty('parseJSON') ? options.parseJSON : true;
this.nextRange = 'id ]..; max=1000';
this.logger = logfmt.namespace({
source: 'heroku-client',
method: options.method || 'GET',
path : options.path
}).time();
}
/*
 * Instantiate a Request object and make a
* request, returning the request promise.
*/
Request.request = function request(options, callback) {
var req = new Request(options, function (err, body) {
if (callback) { callback(err, body); }
});
return req.request();
};
/*
* Check for a cached response, then
* perform an API request. Return the
* request object's promise.
*/
Request.prototype.request = function request() {
this.getCache(this.performRequest.bind(this));
return this.deferred.promise;
};
/*
* Perform the actual API request.
*/
Request.prototype.performRequest = function performRequest(cachedResponse) {
var defaultRequestOptions,
headers,
key,
requestOptions,
req;
this.cachedResponse = cachedResponse;
headers = {
'Accept': 'application/vnd.heroku+json; version=3',
'Content-type': 'application/json',
'Range': this.nextRange
};
this.options.headers = this.options.headers || {};
for (key in this.options.headers) {
if (this.options.headers.hasOwnProperty(key)) {
headers[key] = this.options.headers[key];
}
}
if (this.cachedResponse) {
headers['If-None-Match'] = this.cachedResponse.etag;
}
defaultRequestOptions = {
auth: this.options.auth || ':' + this.options.token,
method: this.options.method || 'GET',
headers: headers
};
requestOptions = this.getRequestOptions(defaultRequestOptions);
if (process.env.HEROKU_HTTP_PROXY_HOST) {
headers.Host = this.host;
req = http.request(requestOptions, this.handleResponse.bind(this));
} else {
req = https.request(requestOptions, this.handleResponse.bind(this));
}
if (this.debug) {
console.error('--> ' + req.method + ' ' + req.path);
}
this.writeBody(req);
this.setRequestTimeout(req);
req.on('error', this.handleError.bind(this));
req.end();
};
/*
 * Return the correct request options, based on whether or not we're using
* an HTTP proxy.
*/
Request.prototype.getRequestOptions = function getRequestOptions(defaultOptions) {
var requestOptions;
if (process.env.HEROKU_HTTP_PROXY_HOST) {
requestOptions = {
agent: new http.Agent({ maxSockets: Number(process.env.HEROKU_CLIENT_MAX_SOCKETS) || 5000 }),
host : process.env.HEROKU_HTTP_PROXY_HOST,
port : process.env.HEROKU_HTTP_PROXY_PORT || 8080,
path : 'https://' + this.host + this.options.path
};
} else {
requestOptions = {
agent: new https.Agent({ maxSockets: Number(process.env.HEROKU_CLIENT_MAX_SOCKETS) || 5000 }),
host : this.host,
port : 443,
path : this.options.path
};
}
return lazy(requestOptions).merge(defaultOptions).toObject();
};
/*
* Handle an API response, returning the
* cached body if it's still valid, or the
* new API response.
*/
Request.prototype.handleResponse = function handleResponse(res) {
var self = this;
var resReader = concat(directResponse);
this.logResponse(res);
if (res.statusCode === 304 && this.cachedResponse) {
if (this.cachedResponse.nextRange) {
this.nextRequest(this.cachedResponse.nextRange, this.cachedResponse.body);
} else {
this.updateAggregate(this.cachedResponse.body);
this.deferred.resolve(this.aggregate);
this.callback(null, this.aggregate);
}
} else {
res.pipe(resReader);
}
function directResponse(data) {
if (self.debug) {
console.error('<-- ' + data);
}
if (res.statusCode.toString().match(/^2\d{2}$/)) {
self.handleSuccess(res, data);
} else {
self.handleFailure(res, data);
}
}
};
/*
* Log the API response.
*/
Request.prototype.logResponse = function logResponse(res) {
if (this.log) {
this.logger.log({
status : res.statusCode,
content_length: res.headers['content-length'],
request_id : res.headers['request-id']
});
}
if (this.debug) {
console.error('<-- ' + res.statusCode + ' ' + res.statusMessage);
}
};
/*
* If the request options include a body,
* write the body to the request and set
* an appropriate 'Content-length' header.
*/
Request.prototype.writeBody = function writeBody(req) {
if (this.options.body) {
var body = JSON.stringify(this.options.body);
if (this.debug) {
console.error('--> ' + body);
}
req.setHeader('Content-length', body.length);
req.write(body);
} else {
req.setHeader('Content-length', 0);
}
};
/*
* If the request options include a timeout,
* set the timeout and provide a callback
* function in case the request exceeds the
* timeout period.
*/
Request.prototype.setRequestTimeout = function setRequestTimeout(req) {
if (!this.options.timeout) { return; }
req.setTimeout(this.options.timeout, function () {
var err = new Error('Request took longer than ' + this.options.timeout + 'ms to complete.');
req.abort();
this.deferred.reject(err);
this.callback(err);
}.bind(this));
};
/*
* Get the request body, and parse it (or not) as appropriate.
* - Parse JSON by default.
* - If parseJSON is `false`, it will not parse.
*/
Request.prototype.parseBody = function parseBody(body) {
if (this.parseJSON) {
return JSON.parse(body || '{}');
} else {
return body;
}
};
/*
* In the event of an error in performing
* the API request, reject the deferred
* object and return an error to the callback.
*/
Request.prototype.handleError = function handleError(err) {
this.deferred.reject(err);
this.callback(err);
};
/*
* In the event of a non-successful API request,
* fail with an appropriate error message and
* status code.
*/
Request.prototype.handleFailure = function handleFailure(res, buffer) {
var callback = this.callback;
var deferred = this.deferred;
var message = 'Expected response to be successful, got ' + res.statusCode;
var err;
err = new Error(message);
err.statusCode = res.statusCode;
err.body = this.parseBody(buffer);
deferred.reject(err);
callback(err);
};
/*
* In the event of a successful API response,
* write the response to the cache and resolve
* with the response body.
*/
Request.prototype.handleSuccess = function handleSuccess(res, buffer) {
var callback = this.callback;
var deferred = this.deferred;
var body = this.parseBody(buffer);
this.setCache(res, body);
if (res.headers['next-range']) {
this.nextRequest(res.headers['next-range'], body);
} else {
this.updateAggregate(body);
deferred.resolve(this.aggregate);
callback(null, this.aggregate);
}
};
/*
* Since this request isn't the full response (206 or
* 304 with a cached Next-Range), perform the next
* request for more data.
*/
Request.prototype.nextRequest = function nextRequest(nextRange, body) {
this.updateAggregate(body);
this.nextRange = nextRange;
// The initial range header passed in (if there was one), is no longer valid, and should no longer take precedence
delete (this.options.headers.Range);
this.request();
};
/*
* If the cache client is alive, get the
* cached response from the cache.
*/
Request.prototype.getCache = function getCache(callback) {
if (!cache) { return callback(null); }
var key = this.getCacheKey();
cache.get(key, function (err, res) {
res = res ? encryptor.decrypt(res) : res;
callback(res);
});
};
/*
* If the cache client is alive, write the
* provided response and body to the cache.
*/
Request.prototype.setCache = function setCache(res, body) {
if ((!cache) || !(res.headers.etag)) { return; }
var key = this.getCacheKey();
var value = {
body : body,
etag : res.headers.etag,
nextRange: res.headers['next-range']
};
value = encryptor.encrypt(value);
cache.set(key, value);
};
/*
* Returns a cache key comprising the request path,
* the 'Next Range' header, and the user's API token.
*/
Request.prototype.getCacheKey = function getCacheKey() {
var path = JSON.stringify([this.options.path, this.nextRange, this.options.token]);
return encryptor.hmac(path);
};
/*
* If given an object, sets aggregate to object,
* otherwise concats array onto aggregate.
*/
Request.prototype.updateAggregate = function updateAggregate(aggregate) {
if (aggregate instanceof Array) {
this.aggregate = this.aggregate || [];
this.aggregate = this.aggregate.concat(aggregate);
} else {
this.aggregate = aggregate;
}
};
/*
* Connect a cache client.
*/
Request.connectCacheClient = function connectCacheClient(opts) {
cache = opts.cache;
encryptor = require('simple-encryptor')(opts.key);
};
| lib/request.js | 'use strict';
var http = require('http');
var https = require('https');
var concat = require('concat-stream');
var lazy = require('lazy.js');
var logfmt = require('logfmt');
var q = require('q');
var cache;
var encryptor;
module.exports = Request;
/*
* Create an object capable of making API
* calls. Accepts custom request options and
* a callback function.
*/
function Request(options, callback) {
this.options = options || {};
this.host = options.host || 'api.heroku.com';
this.log = options.log;
this.callback = callback;
this.deferred = q.defer();
this.parseJSON = options.hasOwnProperty('parseJSON') ? options.parseJSON : true;
this.nextRange = 'id ]..; max=1000';
this.logger = logfmt.namespace({
source: 'heroku-client',
method: options.method || 'GET',
path : options.path
}).time();
}
/*
 * Instantiate a Request object and make a
* request, returning the request promise.
*/
Request.request = function request(options, callback) {
var req = new Request(options, function (err, body) {
if (callback) { callback(err, body); }
});
return req.request();
};
/*
* Check for a cached response, then
* perform an API request. Return the
* request object's promise.
*/
Request.prototype.request = function request() {
this.getCache(this.performRequest.bind(this));
return this.deferred.promise;
};
/*
* Perform the actual API request.
*/
Request.prototype.performRequest = function performRequest(cachedResponse) {
var defaultRequestOptions,
headers,
key,
requestOptions,
req;
this.cachedResponse = cachedResponse;
headers = {
'Accept': 'application/vnd.heroku+json; version=3',
'Content-type': 'application/json',
'Range': this.nextRange
};
this.options.headers = this.options.headers || {};
for (key in this.options.headers) {
if (this.options.headers.hasOwnProperty(key)) {
headers[key] = this.options.headers[key];
}
}
if (this.cachedResponse) {
headers['If-None-Match'] = this.cachedResponse.etag;
}
defaultRequestOptions = {
auth: this.options.auth || ':' + this.options.token,
method: this.options.method || 'GET',
headers: headers
};
requestOptions = this.getRequestOptions(defaultRequestOptions);
if (process.env.HEROKU_HTTP_PROXY_HOST) {
headers.Host = this.host;
req = http.request(requestOptions, this.handleResponse.bind(this));
} else {
req = https.request(requestOptions, this.handleResponse.bind(this));
}
this.writeBody(req);
this.setRequestTimeout(req);
req.on('error', this.handleError.bind(this));
req.end();
};
/*
 * Return the correct request options, based on whether or not we're using
* an HTTP proxy.
*/
Request.prototype.getRequestOptions = function getRequestOptions(defaultOptions) {
var requestOptions;
if (process.env.HEROKU_HTTP_PROXY_HOST) {
requestOptions = {
agent: new http.Agent({ maxSockets: Number(process.env.HEROKU_CLIENT_MAX_SOCKETS) || 5000 }),
host : process.env.HEROKU_HTTP_PROXY_HOST,
port : process.env.HEROKU_HTTP_PROXY_PORT || 8080,
path : 'https://' + this.host + this.options.path
};
} else {
requestOptions = {
agent: new https.Agent({ maxSockets: Number(process.env.HEROKU_CLIENT_MAX_SOCKETS) || 5000 }),
host : this.host,
port : 443,
path : this.options.path
};
}
return lazy(requestOptions).merge(defaultOptions).toObject();
};
/*
* Handle an API response, returning the
* cached body if it's still valid, or the
* new API response.
*/
Request.prototype.handleResponse = function handleResponse(res) {
var self = this;
var resReader = concat(directResponse);
this.logResponse(res);
if (res.statusCode === 304 && this.cachedResponse) {
if (this.cachedResponse.nextRange) {
this.nextRequest(this.cachedResponse.nextRange, this.cachedResponse.body);
} else {
this.updateAggregate(this.cachedResponse.body);
this.deferred.resolve(this.aggregate);
this.callback(null, this.aggregate);
}
} else {
res.pipe(resReader);
}
function directResponse(data) {
if (res.statusCode.toString().match(/^2\d{2}$/)) {
self.handleSuccess(res, data);
} else {
self.handleFailure(res, data);
}
}
};
/*
* Log the API response.
*/
Request.prototype.logResponse = function logResponse(res) {
if (this.log) {
this.logger.log({
status : res.statusCode,
content_length: res.headers['content-length'],
request_id : res.headers['request-id']
});
}
};
/*
* If the request options include a body,
* write the body to the request and set
* an appropriate 'Content-length' header.
*/
Request.prototype.writeBody = function writeBody(req) {
if (this.options.body) {
var body = JSON.stringify(this.options.body);
req.setHeader('Content-length', body.length);
req.write(body);
} else {
req.setHeader('Content-length', 0);
}
};
/*
* If the request options include a timeout,
* set the timeout and provide a callback
* function in case the request exceeds the
* timeout period.
*/
Request.prototype.setRequestTimeout = function setRequestTimeout(req) {
if (!this.options.timeout) { return; }
req.setTimeout(this.options.timeout, function () {
var err = new Error('Request took longer than ' + this.options.timeout + 'ms to complete.');
req.abort();
this.deferred.reject(err);
this.callback(err);
}.bind(this));
};
/*
* Get the request body, and parse it (or not) as appropriate.
* - Parse JSON by default.
* - If parseJSON is `false`, it will not parse.
*/
Request.prototype.parseBody = function parseBody(body) {
if (this.parseJSON) {
return JSON.parse(body || '{}');
} else {
return body;
}
};
/*
* In the event of an error in performing
* the API request, reject the deferred
* object and return an error to the callback.
*/
Request.prototype.handleError = function handleError(err) {
this.deferred.reject(err);
this.callback(err);
};
/*
* In the event of a non-successful API request,
* fail with an appropriate error message and
* status code.
*/
Request.prototype.handleFailure = function handleFailure(res, buffer) {
var callback = this.callback;
var deferred = this.deferred;
var message = 'Expected response to be successful, got ' + res.statusCode;
var err;
err = new Error(message);
err.statusCode = res.statusCode;
err.body = this.parseBody(buffer);
deferred.reject(err);
callback(err);
};
/*
* In the event of a successful API response,
* write the response to the cache and resolve
* with the response body.
*/
Request.prototype.handleSuccess = function handleSuccess(res, buffer) {
var callback = this.callback;
var deferred = this.deferred;
var body = this.parseBody(buffer);
this.setCache(res, body);
if (res.headers['next-range']) {
this.nextRequest(res.headers['next-range'], body);
} else {
this.updateAggregate(body);
deferred.resolve(this.aggregate);
callback(null, this.aggregate);
}
};
/*
* Since this request isn't the full response (206 or
* 304 with a cached Next-Range), perform the next
* request for more data.
*/
Request.prototype.nextRequest = function nextRequest(nextRange, body) {
this.updateAggregate(body);
this.nextRange = nextRange;
// The initial range header passed in (if there was one), is no longer valid, and should no longer take precedence
delete (this.options.headers.Range);
this.request();
};
/*
* If the cache client is alive, get the
* cached response from the cache.
*/
Request.prototype.getCache = function getCache(callback) {
if (!cache) { return callback(null); }
var key = this.getCacheKey();
cache.get(key, function (err, res) {
res = res ? encryptor.decrypt(res) : res;
callback(res);
});
};
/*
* If the cache client is alive, write the
* provided response and body to the cache.
*/
Request.prototype.setCache = function setCache(res, body) {
if ((!cache) || !(res.headers.etag)) { return; }
var key = this.getCacheKey();
var value = {
body : body,
etag : res.headers.etag,
nextRange: res.headers['next-range']
};
value = encryptor.encrypt(value);
cache.set(key, value);
};
/*
* Returns a cache key comprising the request path,
* the 'Next Range' header, and the user's API token.
*/
Request.prototype.getCacheKey = function getCacheKey() {
var path = JSON.stringify([this.options.path, this.nextRange, this.options.token]);
return encryptor.hmac(path);
};
/*
* If given an object, sets aggregate to object,
* otherwise concats array onto aggregate.
*/
Request.prototype.updateAggregate = function updateAggregate(aggregate) {
if (aggregate instanceof Array) {
this.aggregate = this.aggregate || [];
this.aggregate = this.aggregate.concat(aggregate);
} else {
this.aggregate = aggregate;
}
};
/*
* Connect a cache client.
*/
Request.connectCacheClient = function connectCacheClient(opts) {
cache = opts.cache;
encryptor = require('simple-encryptor')(opts.key);
};
| added debug info
| lib/request.js | added debug info | <ide><path>ib/request.js
<ide> this.options = options || {};
<ide> this.host = options.host || 'api.heroku.com';
<ide> this.log = options.log;
<add> this.debug = options.debug;
<ide> this.callback = callback;
<ide> this.deferred = q.defer();
<ide> this.parseJSON = options.hasOwnProperty('parseJSON') ? options.parseJSON : true;
<ide> req = https.request(requestOptions, this.handleResponse.bind(this));
<ide> }
<ide>
<add> if (this.debug) {
<add> console.error('--> ' + req.method + ' ' + req.path);
<add> }
<add>
<ide> this.writeBody(req);
<ide> this.setRequestTimeout(req);
<ide>
<ide> }
<ide>
<ide> function directResponse(data) {
<add> if (self.debug) {
<add> console.error('<-- ' + data);
<add> }
<ide> if (res.statusCode.toString().match(/^2\d{2}$/)) {
<ide> self.handleSuccess(res, data);
<ide> } else {
<ide> request_id : res.headers['request-id']
<ide> });
<ide> }
<add> if (this.debug) {
<add> console.error('<-- ' + res.statusCode + ' ' + res.statusMessage);
<add> }
<ide> };
<ide>
<ide>
<ide> Request.prototype.writeBody = function writeBody(req) {
<ide> if (this.options.body) {
<ide> var body = JSON.stringify(this.options.body);
<add> if (this.debug) {
<add> console.error('--> ' + body);
<add> }
<ide>
<ide> req.setHeader('Content-length', body.length);
<ide> req.write(body); |
|
Java | apache-2.0 | 59f96a9880e38b945bc34285f4c842df5622d8bb | 0 | salguarnieri/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,apixandru/intellij-community,asedunov/intellij-community,allotria/intellij-community,allotria/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,signed/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,da1z/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,fitermay/intellij-community,xfournet/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,xfournet/intellij-community,signed/intellij-community,fitermay/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,signed/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,semonte/intellij-community,apixandru/intellij-community,ibinti/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,asedunov/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,retomerz/intellij-community,asedunov/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,fitermay/intellij-community,ibinti/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,asedunov/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,FHannes/intellij-community,xfournet/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,mglukhikh/intel
lij-community,da1z/intellij-community,youdonghai/intellij-community,da1z/intellij-community,semonte/intellij-community,apixandru/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ibinti/intellij-community,signed/intellij-community,retomerz/intellij-community,ibinti/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,da1z/intellij-community,retomerz/intellij-community,ibinti/intellij-community,signed/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,signed/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,xfournet/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,semonte/intellij-community,vvv1559/intellij-community,allotria/intellij-community,da1z/intellij-community,semonte/intellij-community,signed/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,da1z/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,hurricup/intellij-community,retomerz/intellij-community,allotria/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,signed/intellij-community,da1z/intellij-community,apixandru/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,signed/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,da1z/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,allotria/intellij-community,ibinti/intellij-community,semonte/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,allotria/intellij-community,semonte/intellij-community,hurricup/intellij-community,da1z/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ibinti/intellij-community,fitermay/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,hurricup/intel
lij-community,apixandru/intellij-community,FHannes/intellij-community,xfournet/intellij-community,FHannes/intellij-community,semonte/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,allotria/intellij-community,ibinti/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,da1z/intellij-community,hurricup/intellij-community,signed/intellij-community,signed/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.ex.AnActionListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.BalloonBuilder;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.registry.RegistryValueListener;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.Alarm;
import com.intellij.util.IJSwingUtilities;
import com.intellij.util.ui.Html;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import javax.swing.text.*;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.AWTEventListener;
import java.awt.event.MouseEvent;
public class IdeTooltipManager implements ApplicationComponent, AWTEventListener {
public static final String IDE_TOOLTIP_PLACE = "IdeTooltip";
public static final Color GRAPHITE_COLOR = new Color(100, 100, 100, 230);
private RegistryValue myIsEnabled;
private Component myCurrentComponent;
private Component myQueuedComponent;
private BalloonImpl myCurrentTipUi;
private MouseEvent myCurrentEvent;
private boolean myCurrentTipIsCentered;
private Runnable myHideRunnable;
private final JBPopupFactory myPopupFactory;
private boolean myShowDelay = true;
private final Alarm myAlarm = new Alarm();
private int myX;
private int myY;
private IdeTooltip myCurrentTooltip;
private Runnable myShowRequest;
private IdeTooltip myQueuedTooltip;
public IdeTooltipManager(JBPopupFactory popupFactory) {
myPopupFactory = popupFactory;
}
@Override
public void initComponent() {
myIsEnabled = Registry.get("ide.tooltip.callout");
myIsEnabled.addListener(new RegistryValueListener.Adapter() {
@Override
public void afterValueChanged(RegistryValue value) {
processEnabled();
}
}, ApplicationManager.getApplication());
Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK);
ActionManager.getInstance().addAnActionListener(new AnActionListener.Adapter() {
@Override
public void beforeActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event) {
hideCurrent(null, action, event);
}
}, ApplicationManager.getApplication());
processEnabled();
}
@Override
public void eventDispatched(AWTEvent event) {
if (!myIsEnabled.asBoolean()) return;
MouseEvent me = (MouseEvent)event;
Component c = me.getComponent();
if (me.getID() == MouseEvent.MOUSE_ENTERED) {
boolean canShow = true;
if (c != myCurrentComponent) {
canShow = hideCurrent(me, null, null);
}
if (canShow) {
maybeShowFor(c, me);
}
}
else if (me.getID() == MouseEvent.MOUSE_EXITED) {
if (c == myCurrentComponent || c == myQueuedComponent) {
hideCurrent(me, null, null);
}
}
else if (me.getID() == MouseEvent.MOUSE_MOVED) {
if (c == myCurrentComponent || c == myQueuedComponent) {
if (myCurrentTipUi != null && myCurrentTipUi.wasFadedIn()) {
maybeShowFor(c, me);
}
else {
if (!myCurrentTipIsCentered) {
myX = me.getX();
myY = me.getY();
if (c instanceof JComponent && ((JComponent)c).getToolTipText(me) == null && (myQueuedTooltip == null || !myQueuedTooltip.isHint())) {
              hideCurrent(me, null, null);//There is no tooltip or hint here, so treat it as MOUSE_EXITED
}
else {
maybeShowFor(c, me);
}
}
}
}
else if (myCurrentComponent == null && myQueuedComponent == null) {
maybeShowFor(c, me);
}
}
else if (me.getID() == MouseEvent.MOUSE_PRESSED) {
boolean clickOnTooltip = myCurrentTipUi != null && myCurrentTipUi == JBPopupFactory.getInstance().getParentBalloonFor(c);
if (c == myCurrentComponent || clickOnTooltip) {
hideCurrent(me, null, null, null, !clickOnTooltip);
}
}
else if (me.getID() == MouseEvent.MOUSE_DRAGGED) {
hideCurrent(me, null, null);
}
}
private void maybeShowFor(Component c, MouseEvent me) {
if (!(c instanceof JComponent)) return;
JComponent comp = (JComponent)c;
Window wnd = SwingUtilities.getWindowAncestor(comp);
if (wnd == null) return;
if (!wnd.isActive()) {
if (JBPopupFactory.getInstance().isChildPopupFocused(wnd)) return;
}
String tooltipText = comp.getToolTipText(me);
if (tooltipText == null || tooltipText.trim().isEmpty()) return;
boolean centerDefault = Boolean.TRUE.equals(comp.getClientProperty(UIUtil.CENTER_TOOLTIP_DEFAULT));
boolean centerStrict = Boolean.TRUE.equals(comp.getClientProperty(UIUtil.CENTER_TOOLTIP_STRICT));
int shift = centerStrict ? 0 : centerDefault ? 4 : 0;
    // Balloon may appear exactly above useful content; such behavior is rather annoying.
if (c instanceof JTree) {
TreePath path = ((JTree)c).getClosestPathForLocation(me.getX(), me.getY());
if (path != null) {
Rectangle pathBounds = ((JTree)c).getPathBounds(path);
if (pathBounds != null && pathBounds.y + 4 < me.getY()) {
shift += me.getY() - pathBounds.y - 4;
}
}
}
queueShow(comp, me, centerStrict || centerDefault, shift, -shift, -shift);
}
private void queueShow(final JComponent c, final MouseEvent me, final boolean toCenter, int shift, int posChangeX, int posChangeY) {
String aText = String.valueOf(c.getToolTipText(me));
final IdeTooltip tooltip = new IdeTooltip(c, me.getPoint(), null, /*new Object()*/c, aText) {
@Override
protected boolean beforeShow() {
myCurrentEvent = me;
if (!c.isShowing()) return false;
String text = c.getToolTipText(myCurrentEvent);
if (text == null || text.trim().isEmpty()) return false;
JLayeredPane layeredPane = IJSwingUtilities.findParentOfType(c, JLayeredPane.class);
final JEditorPane pane = initPane(text, new HintHint(me).setAwtTooltip(true), layeredPane);
final Wrapper wrapper = new Wrapper(pane);
setTipComponent(wrapper);
return true;
}
}.setToCenter(toCenter).setCalloutShift(shift).setPositionChangeShift(posChangeX, posChangeY).setLayer(Balloon.Layer.top);
show(tooltip, false);
}
public IdeTooltip show(final IdeTooltip tooltip, boolean now) {
return show(tooltip, now, true);
}
public IdeTooltip show(final IdeTooltip tooltip, boolean now, final boolean animationEnabled) {
myAlarm.cancelAllRequests();
hideCurrent(null, tooltip, null, null);
myQueuedComponent = tooltip.getComponent();
myQueuedTooltip = tooltip;
myShowRequest = new Runnable() {
@Override
public void run() {
if (myShowRequest == null) {
return;
}
if (myQueuedComponent != tooltip.getComponent() || !tooltip.getComponent().isShowing()) {
hideCurrent(null, tooltip, null, null, animationEnabled);
return;
}
if (tooltip.beforeShow()) {
show(tooltip, null, animationEnabled);
}
else {
hideCurrent(null, tooltip, null, null, animationEnabled);
}
}
};
if (now) {
myShowRequest.run();
}
else {
myAlarm.addRequest(myShowRequest, myShowDelay ? tooltip.getShowDelay() : tooltip.getInitialReshowDelay());
}
return tooltip;
}
private void show(final IdeTooltip tooltip, @Nullable Runnable beforeShow, boolean animationEnabled) {
boolean toCenterX;
boolean toCenterY;
boolean toCenter = tooltip.isToCenter();
boolean small = false;
if (!toCenter && tooltip.isToCenterIfSmall()) {
Dimension size = tooltip.getComponent().getSize();
toCenterX = size.width < 64;
toCenterY = size.height < 64;
toCenter = toCenterX || toCenterY;
small = true;
}
else {
toCenterX = true;
toCenterY = true;
}
Point effectivePoint = tooltip.getPoint();
if (toCenter) {
Rectangle bounds = tooltip.getComponent().getBounds();
effectivePoint.x = toCenterX ? bounds.width / 2 : effectivePoint.x;
effectivePoint.y = toCenterY ? bounds.height / 2 : effectivePoint.y;
}
if (myCurrentComponent == tooltip.getComponent() && myCurrentTipUi != null && !myCurrentTipUi.isDisposed()) {
myCurrentTipUi.show(new RelativePoint(tooltip.getComponent(), effectivePoint), tooltip.getPreferredPosition());
return;
}
if (myCurrentComponent == tooltip.getComponent() && effectivePoint.equals(new Point(myX, myY))) {
return;
}
Color bg = tooltip.getTextBackground() != null ? tooltip.getTextBackground() : getTextBackground(true);
Color fg = tooltip.getTextForeground() != null ? tooltip.getTextForeground() : getTextForeground(true);
Color border = tooltip.getBorderColor() != null ? tooltip.getBorderColor() : getBorderColor(true);
BalloonBuilder builder = myPopupFactory.createBalloonBuilder(tooltip.getTipComponent())
.setFillColor(bg)
.setBorderColor(border)
.setBorderInsets(tooltip.getBorderInsets())
.setAnimationCycle(animationEnabled ? Registry.intValue("ide.tooltip.animationCycle") : 0)
.setShowCallout(true)
.setCalloutShift(small && tooltip.getCalloutShift() == 0 ? 2 : tooltip.getCalloutShift())
.setPositionChangeXShift(tooltip.getPositionChangeX())
.setPositionChangeYShift(tooltip.getPositionChangeY())
.setHideOnKeyOutside(!tooltip.isExplicitClose())
.setHideOnAction(!tooltip.isExplicitClose())
.setLayer(tooltip.getLayer());
tooltip.getTipComponent().setForeground(fg);
tooltip.getTipComponent().setBorder(new EmptyBorder(1, 3, 2, 3));
tooltip.getTipComponent().setFont(tooltip.getFont() != null ? tooltip.getFont() : getTextFont(true));
if (beforeShow != null) {
beforeShow.run();
}
myCurrentTipUi = (BalloonImpl)builder.createBalloon();
myCurrentTipUi.setAnimationEnabled(animationEnabled);
tooltip.setUi(myCurrentTipUi);
myCurrentComponent = tooltip.getComponent();
myX = effectivePoint.x;
myY = effectivePoint.y;
myCurrentTipIsCentered = toCenter;
myCurrentTooltip = tooltip;
myShowRequest = null;
myQueuedComponent = null;
myQueuedTooltip = null;
myCurrentTipUi.show(new RelativePoint(tooltip.getComponent(), effectivePoint), tooltip.getPreferredPosition());
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
if (myCurrentTooltip == tooltip && tooltip.canBeDismissedOnTimeout()) {
hideCurrent(null, null, null);
}
}
}, tooltip.getDismissDelay());
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getTextForeground(boolean awtTooltip) {
return UIUtil.getToolTipForeground();
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getLinkForeground(boolean awtTooltip) {
return JBColor.blue;
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getTextBackground(boolean awtTooltip) {
return UIUtil.getToolTipBackground();
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public String getUlImg(boolean awtTooltip) {
AllIcons.General.Mdot.getIconWidth(); // keep icon reference
return UIUtil.isUnderDarcula() ? "/general/mdot-white.png" : "/general/mdot.png";
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getBorderColor(boolean awtTooltip) {
return new JBColor(Gray._160, new Color(154, 154, 102));
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public boolean isOwnBorderAllowed(boolean awtTooltip) {
return !awtTooltip;
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public boolean isOpaqueAllowed(boolean awtTooltip) {
return !awtTooltip;
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Font getTextFont(boolean awtTooltip) {
return UIManager.getFont("ToolTip.font");
}
public boolean hasCurrent() {
return myCurrentTooltip != null;
}
public boolean hideCurrent(@Nullable MouseEvent me) {
return hideCurrent(me, null, null, null);
}
private boolean hideCurrent(@Nullable MouseEvent me, @Nullable AnAction action, @Nullable AnActionEvent event) {
return hideCurrent(me, null, action, event, myCurrentTipUi != null && myCurrentTipUi.isAnimationEnabled());
}
private boolean hideCurrent(@Nullable MouseEvent me,
@Nullable IdeTooltip tooltipToShow,
@Nullable AnAction action,
@Nullable AnActionEvent event) {
return hideCurrent(me, tooltipToShow, action, event, myCurrentTipUi != null && myCurrentTipUi.isAnimationEnabled());
}
private boolean hideCurrent(@Nullable MouseEvent me, @Nullable IdeTooltip tooltipToShow, @Nullable AnAction action, @Nullable AnActionEvent event, final boolean animationEnabled) {
if (myCurrentTooltip != null && me != null && myCurrentTooltip.isInside(RelativePoint.fromScreen(me.getLocationOnScreen()))) {
if (me.getButton() == MouseEvent.NOBUTTON || myCurrentTipUi == null || myCurrentTipUi.isBlockClicks()) {
return false;
}
}
myShowRequest = null;
myQueuedComponent = null;
myQueuedTooltip = null;
if (myCurrentTooltip == null) return true;
if (myCurrentTipUi != null) {
RelativePoint target = me != null ? new RelativePoint(me) : null;
boolean isInside = target != null && myCurrentTipUi.isInside(target);
boolean isMovingForward = target != null && myCurrentTipUi.isMovingForward(target);
boolean canAutoHide = myCurrentTooltip.canAutohideOn(new TooltipEvent(me, isInside || isMovingForward, action, event));
boolean implicitMouseMove = me != null &&
(me.getID() == MouseEvent.MOUSE_MOVED ||
me.getID() == MouseEvent.MOUSE_EXITED ||
me.getID() == MouseEvent.MOUSE_ENTERED);
if (!canAutoHide
|| (myCurrentTooltip.isExplicitClose() && implicitMouseMove)
|| (tooltipToShow != null && !tooltipToShow.isHint() && Comparing.equal(myCurrentTooltip, tooltipToShow))) {
if (myHideRunnable != null) {
myHideRunnable = null;
}
return false;
}
}
myHideRunnable = new Runnable() {
@Override
public void run() {
if (myHideRunnable != null) {
hideCurrentNow(animationEnabled);
myHideRunnable = null;
}
}
};
if (me != null && me.getButton() == MouseEvent.NOBUTTON) {
myAlarm.addRequest(myHideRunnable, Registry.intValue("ide.tooltip.autoDismissDeadZone"));
}
else {
myHideRunnable.run();
myHideRunnable = null;
}
return true;
}
public void hideCurrentNow(boolean animationEnabled) {
if (myCurrentTipUi != null) {
myCurrentTipUi.setAnimationEnabled(animationEnabled);
myCurrentTipUi.hide();
myCurrentTooltip.onHidden();
myShowDelay = false;
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
myShowDelay = true;
}
}, Registry.intValue("ide.tooltip.reshowDelay"));
}
myShowRequest = null;
myCurrentTooltip = null;
myCurrentTipUi = null;
myCurrentComponent = null;
myQueuedComponent = null;
myQueuedTooltip = null;
myCurrentEvent = null;
myCurrentTipIsCentered = false;
myX = -1;
myY = -1;
}
private void processEnabled() {
if (myIsEnabled.asBoolean()) {
ToolTipManager.sharedInstance().setEnabled(false);
}
else {
ToolTipManager.sharedInstance().setEnabled(true);
}
}
@Override
public void disposeComponent() {
}
public static IdeTooltipManager getInstance() {
return ApplicationManager.getApplication().getComponent(IdeTooltipManager.class);
}
public void hide(@Nullable IdeTooltip tooltip) {
if (myCurrentTooltip == tooltip || tooltip == null || tooltip == myQueuedTooltip) {
hideCurrent(null, null, null);
}
}
public void cancelAutoHide() {
myHideRunnable = null;
}
public static JEditorPane initPane(@NonNls String text, final HintHint hintHint, @Nullable final JLayeredPane layeredPane) {
return initPane(new Html(text), hintHint, layeredPane);
}
public static JEditorPane initPane(@NonNls Html html, final HintHint hintHint, @Nullable final JLayeredPane layeredPane) {
final Ref<Dimension> prefSize = new Ref<Dimension>(null);
@NonNls String text = HintUtil.prepareHintText(html, hintHint);
final boolean[] prefSizeWasComputed = {false};
final JEditorPane pane = new JEditorPane() {
@Override
public Dimension getPreferredSize() {
if (!prefSizeWasComputed[0] && hintHint.isAwtTooltip()) {
JLayeredPane lp = layeredPane;
if (lp == null) {
JRootPane rootPane = UIUtil.getRootPane(this);
if (rootPane != null) {
lp = rootPane.getLayeredPane();
}
}
Dimension size;
if (lp != null) {
size = lp.getSize();
prefSizeWasComputed[0] = true;
}
else {
size = ScreenUtil.getScreenRectangle(0, 0).getSize();
}
int fitWidth = (int)(size.width * 0.8);
Dimension prefSizeOriginal = super.getPreferredSize();
if (prefSizeOriginal.width > fitWidth) {
setSize(new Dimension(fitWidth, Integer.MAX_VALUE));
Dimension fixedWidthSize = super.getPreferredSize();
Dimension minSize = super.getMinimumSize();
prefSize.set(new Dimension(fitWidth > minSize.width ? fitWidth : minSize.width, fixedWidthSize.height));
}
else {
prefSize.set(new Dimension(prefSizeOriginal));
}
}
Dimension s = prefSize.get() != null ? new Dimension(prefSize.get()) : super.getPreferredSize();
Border b = getBorder();
if (b != null) {
JBInsets.addTo(s, b.getBorderInsets(this));
}
return s;
}
@Override
public void setPreferredSize(Dimension preferredSize) {
super.setPreferredSize(preferredSize);
prefSize.set(preferredSize);
}
};
final HTMLEditorKit.HTMLFactory factory = new HTMLEditorKit.HTMLFactory() {
@Override
public View create(Element elem) {
AttributeSet attrs = elem.getAttributes();
Object elementName = attrs.getAttribute(AbstractDocument.ElementNameAttribute);
Object o = elementName != null ? null : attrs.getAttribute(StyleConstants.NameAttribute);
if (o instanceof HTML.Tag) {
HTML.Tag kind = (HTML.Tag)o;
if (kind == HTML.Tag.HR) {
return new CustomHrView(elem, hintHint.getTextForeground());
}
}
return super.create(elem);
}
};
HTMLEditorKit kit = new HTMLEditorKit() {
@Override
public ViewFactory getViewFactory() {
return factory;
}
};
pane.setEditorKit(kit);
pane.setText(text);
pane.setCaretPosition(0);
pane.setEditable(false);
if (hintHint.isOwnBorderAllowed()) {
setBorder(pane);
setColors(pane);
}
else {
pane.setBorder(null);
}
if (!hintHint.isAwtTooltip()) {
prefSizeWasComputed[0] = true;
}
final boolean opaque = hintHint.isOpaqueAllowed();
pane.setOpaque(opaque);
if (UIUtil.isUnderNimbusLookAndFeel() && !opaque) {
pane.setBackground(UIUtil.TRANSPARENT_COLOR);
}
else {
pane.setBackground(hintHint.getTextBackground());
}
return pane;
}
public static void setColors(JComponent pane) {
pane.setForeground(JBColor.foreground());
pane.setBackground(HintUtil.INFORMATION_COLOR);
pane.setOpaque(true);
}
public static void setBorder(JComponent pane) {
pane.setBorder(
BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(Color.black), BorderFactory.createEmptyBorder(0, 5, 0, 5)));
}
@NotNull
@Override
public String getComponentName() {
return "IDE Tooltip Manager";
}
public boolean isQueuedToShow(IdeTooltip tooltip) {
return Comparing.equal(myQueuedTooltip, tooltip);
}
}
| platform/platform-impl/src/com/intellij/ide/IdeTooltipManager.java | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.ex.AnActionListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.ui.popup.BalloonBuilder;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.openapi.util.registry.RegistryValueListener;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.Alarm;
import com.intellij.util.IJSwingUtilities;
import com.intellij.util.ui.Html;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import javax.swing.text.*;
import javax.swing.text.html.HTML;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.AWTEventListener;
import java.awt.event.MouseEvent;
public class IdeTooltipManager implements ApplicationComponent, AWTEventListener {
public static final String IDE_TOOLTIP_PLACE = "IdeTooltip";
public static final Color GRAPHITE_COLOR = new Color(100, 100, 100, 230);
private RegistryValue myIsEnabled;
private Component myCurrentComponent;
private Component myQueuedComponent;
private BalloonImpl myCurrentTipUi;
private MouseEvent myCurrentEvent;
private boolean myCurrentTipIsCentered;
private Runnable myHideRunnable;
private final JBPopupFactory myPopupFactory;
private boolean myShowDelay = true;
private final Alarm myAlarm = new Alarm();
private int myX;
private int myY;
private IdeTooltip myCurrentTooltip;
private Runnable myShowRequest;
private IdeTooltip myQueuedTooltip;
public IdeTooltipManager(JBPopupFactory popupFactory) {
myPopupFactory = popupFactory;
}
@Override
public void initComponent() {
myIsEnabled = Registry.get("ide.tooltip.callout");
myIsEnabled.addListener(new RegistryValueListener.Adapter() {
@Override
public void afterValueChanged(RegistryValue value) {
processEnabled();
}
}, ApplicationManager.getApplication());
Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.MOUSE_EVENT_MASK | AWTEvent.MOUSE_MOTION_EVENT_MASK);
ActionManager.getInstance().addAnActionListener(new AnActionListener.Adapter() {
@Override
public void beforeActionPerformed(AnAction action, DataContext dataContext, AnActionEvent event) {
hideCurrent(null, action, event);
}
}, ApplicationManager.getApplication());
processEnabled();
}
@Override
public void eventDispatched(AWTEvent event) {
if (!myIsEnabled.asBoolean()) return;
MouseEvent me = (MouseEvent)event;
Component c = me.getComponent();
if (me.getID() == MouseEvent.MOUSE_ENTERED) {
boolean canShow = true;
if (c != myCurrentComponent) {
canShow = hideCurrent(me, null, null);
}
if (canShow) {
maybeShowFor(c, me);
}
}
else if (me.getID() == MouseEvent.MOUSE_EXITED) {
if (c == myCurrentComponent || c == myQueuedComponent) {
hideCurrent(me, null, null);
}
}
else if (me.getID() == MouseEvent.MOUSE_MOVED) {
if (c == myCurrentComponent || c == myQueuedComponent) {
if (myCurrentTipUi != null && myCurrentTipUi.wasFadedIn()) {
maybeShowFor(c, me);
}
else {
if (!myCurrentTipIsCentered) {
myX = me.getX();
myY = me.getY();
if (c instanceof JComponent && ((JComponent)c).getToolTipText(me) == null && (myQueuedTooltip == null || !myQueuedTooltip.isHint())) {
hideCurrent(me, null, null);//There is no tooltip or hint here, let's proceed it as MOUSE_EXITED
}
else {
maybeShowFor(c, me);
}
}
}
}
else if (myCurrentComponent == null && myQueuedComponent == null) {
maybeShowFor(c, me);
}
}
else if (me.getID() == MouseEvent.MOUSE_PRESSED) {
if (c == myCurrentComponent) {
hideCurrent(me, null, null);
}
}
else if (me.getID() == MouseEvent.MOUSE_DRAGGED) {
hideCurrent(me, null, null);
}
}
private void maybeShowFor(Component c, MouseEvent me) {
if (!(c instanceof JComponent)) return;
JComponent comp = (JComponent)c;
Window wnd = SwingUtilities.getWindowAncestor(comp);
if (wnd == null) return;
if (!wnd.isActive()) {
if (JBPopupFactory.getInstance().isChildPopupFocused(wnd)) return;
}
String tooltipText = comp.getToolTipText(me);
if (tooltipText == null || tooltipText.trim().isEmpty()) return;
boolean centerDefault = Boolean.TRUE.equals(comp.getClientProperty(UIUtil.CENTER_TOOLTIP_DEFAULT));
boolean centerStrict = Boolean.TRUE.equals(comp.getClientProperty(UIUtil.CENTER_TOOLTIP_STRICT));
int shift = centerStrict ? 0 : centerDefault ? 4 : 0;
// Balloon may appear exactly above useful content, such behavior is rather annoying.
if (c instanceof JTree) {
TreePath path = ((JTree)c).getClosestPathForLocation(me.getX(), me.getY());
if (path != null) {
Rectangle pathBounds = ((JTree)c).getPathBounds(path);
if (pathBounds != null && pathBounds.y + 4 < me.getY()) {
shift += me.getY() - pathBounds.y - 4;
}
}
}
queueShow(comp, me, centerStrict || centerDefault, shift, -shift, -shift);
}
private void queueShow(final JComponent c, final MouseEvent me, final boolean toCenter, int shift, int posChangeX, int posChangeY) {
String aText = String.valueOf(c.getToolTipText(me));
final IdeTooltip tooltip = new IdeTooltip(c, me.getPoint(), null, /*new Object()*/c, aText) {
@Override
protected boolean beforeShow() {
myCurrentEvent = me;
if (!c.isShowing()) return false;
String text = c.getToolTipText(myCurrentEvent);
if (text == null || text.trim().isEmpty()) return false;
JLayeredPane layeredPane = IJSwingUtilities.findParentOfType(c, JLayeredPane.class);
final JEditorPane pane = initPane(text, new HintHint(me).setAwtTooltip(true), layeredPane);
final Wrapper wrapper = new Wrapper(pane);
setTipComponent(wrapper);
return true;
}
}.setToCenter(toCenter).setCalloutShift(shift).setPositionChangeShift(posChangeX, posChangeY).setLayer(Balloon.Layer.top);
show(tooltip, false);
}
public IdeTooltip show(final IdeTooltip tooltip, boolean now) {
return show(tooltip, now, true);
}
public IdeTooltip show(final IdeTooltip tooltip, boolean now, final boolean animationEnabled) {
myAlarm.cancelAllRequests();
hideCurrent(null, tooltip, null, null);
myQueuedComponent = tooltip.getComponent();
myQueuedTooltip = tooltip;
myShowRequest = new Runnable() {
@Override
public void run() {
if (myShowRequest == null) {
return;
}
if (myQueuedComponent != tooltip.getComponent() || !tooltip.getComponent().isShowing()) {
hideCurrent(null, tooltip, null, null, animationEnabled);
return;
}
if (tooltip.beforeShow()) {
show(tooltip, null, animationEnabled);
}
else {
hideCurrent(null, tooltip, null, null, animationEnabled);
}
}
};
if (now) {
myShowRequest.run();
}
else {
myAlarm.addRequest(myShowRequest, myShowDelay ? tooltip.getShowDelay() : tooltip.getInitialReshowDelay());
}
return tooltip;
}
private void show(final IdeTooltip tooltip, @Nullable Runnable beforeShow, boolean animationEnabled) {
boolean toCenterX;
boolean toCenterY;
boolean toCenter = tooltip.isToCenter();
boolean small = false;
if (!toCenter && tooltip.isToCenterIfSmall()) {
Dimension size = tooltip.getComponent().getSize();
toCenterX = size.width < 64;
toCenterY = size.height < 64;
toCenter = toCenterX || toCenterY;
small = true;
}
else {
toCenterX = true;
toCenterY = true;
}
Point effectivePoint = tooltip.getPoint();
if (toCenter) {
Rectangle bounds = tooltip.getComponent().getBounds();
effectivePoint.x = toCenterX ? bounds.width / 2 : effectivePoint.x;
effectivePoint.y = toCenterY ? bounds.height / 2 : effectivePoint.y;
}
if (myCurrentComponent == tooltip.getComponent() && myCurrentTipUi != null && !myCurrentTipUi.isDisposed()) {
myCurrentTipUi.show(new RelativePoint(tooltip.getComponent(), effectivePoint), tooltip.getPreferredPosition());
return;
}
if (myCurrentComponent == tooltip.getComponent() && effectivePoint.equals(new Point(myX, myY))) {
return;
}
Color bg = tooltip.getTextBackground() != null ? tooltip.getTextBackground() : getTextBackground(true);
Color fg = tooltip.getTextForeground() != null ? tooltip.getTextForeground() : getTextForeground(true);
Color border = tooltip.getBorderColor() != null ? tooltip.getBorderColor() : getBorderColor(true);
BalloonBuilder builder = myPopupFactory.createBalloonBuilder(tooltip.getTipComponent())
.setFillColor(bg)
.setBorderColor(border)
.setBorderInsets(tooltip.getBorderInsets())
.setAnimationCycle(animationEnabled ? Registry.intValue("ide.tooltip.animationCycle") : 0)
.setShowCallout(true)
.setCalloutShift(small && tooltip.getCalloutShift() == 0 ? 2 : tooltip.getCalloutShift())
.setPositionChangeXShift(tooltip.getPositionChangeX())
.setPositionChangeYShift(tooltip.getPositionChangeY())
.setHideOnKeyOutside(!tooltip.isExplicitClose())
.setHideOnAction(!tooltip.isExplicitClose())
.setLayer(tooltip.getLayer());
tooltip.getTipComponent().setForeground(fg);
tooltip.getTipComponent().setBorder(new EmptyBorder(1, 3, 2, 3));
tooltip.getTipComponent().setFont(tooltip.getFont() != null ? tooltip.getFont() : getTextFont(true));
if (beforeShow != null) {
beforeShow.run();
}
myCurrentTipUi = (BalloonImpl)builder.createBalloon();
myCurrentTipUi.setAnimationEnabled(animationEnabled);
tooltip.setUi(myCurrentTipUi);
myCurrentComponent = tooltip.getComponent();
myX = effectivePoint.x;
myY = effectivePoint.y;
myCurrentTipIsCentered = toCenter;
myCurrentTooltip = tooltip;
myShowRequest = null;
myQueuedComponent = null;
myQueuedTooltip = null;
myCurrentTipUi.show(new RelativePoint(tooltip.getComponent(), effectivePoint), tooltip.getPreferredPosition());
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
if (myCurrentTooltip == tooltip && tooltip.canBeDismissedOnTimeout()) {
hideCurrent(null, null, null);
}
}
}, tooltip.getDismissDelay());
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getTextForeground(boolean awtTooltip) {
return UIUtil.getToolTipForeground();
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getLinkForeground(boolean awtTooltip) {
return JBColor.blue;
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getTextBackground(boolean awtTooltip) {
return UIUtil.getToolTipBackground();
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public String getUlImg(boolean awtTooltip) {
AllIcons.General.Mdot.getIconWidth(); // keep icon reference
return UIUtil.isUnderDarcula() ? "/general/mdot-white.png" : "/general/mdot.png";
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Color getBorderColor(boolean awtTooltip) {
return new JBColor(Gray._160, new Color(154, 154, 102));
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public boolean isOwnBorderAllowed(boolean awtTooltip) {
return !awtTooltip;
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public boolean isOpaqueAllowed(boolean awtTooltip) {
return !awtTooltip;
}
@SuppressWarnings({"MethodMayBeStatic", "UnusedParameters"})
public Font getTextFont(boolean awtTooltip) {
return UIManager.getFont("ToolTip.font");
}
public boolean hasCurrent() {
return myCurrentTooltip != null;
}
public boolean hideCurrent(@Nullable MouseEvent me) {
return hideCurrent(me, null, null, null);
}
private boolean hideCurrent(@Nullable MouseEvent me, @Nullable AnAction action, @Nullable AnActionEvent event) {
return hideCurrent(me, null, action, event, myCurrentTipUi != null && myCurrentTipUi.isAnimationEnabled());
}
private boolean hideCurrent(@Nullable MouseEvent me,
@Nullable IdeTooltip tooltipToShow,
@Nullable AnAction action,
@Nullable AnActionEvent event) {
return hideCurrent(me, tooltipToShow, action, event, myCurrentTipUi != null && myCurrentTipUi.isAnimationEnabled());
}
private boolean hideCurrent(@Nullable MouseEvent me, @Nullable IdeTooltip tooltipToShow, @Nullable AnAction action, @Nullable AnActionEvent event, final boolean animationEnabled) {
if (myCurrentTooltip != null && me != null && myCurrentTooltip.isInside(RelativePoint.fromScreen(me.getLocationOnScreen()))) {
if (me.getButton() == MouseEvent.NOBUTTON || myCurrentTipUi == null || myCurrentTipUi.isBlockClicks()) {
return false;
}
}
myShowRequest = null;
myQueuedComponent = null;
myQueuedTooltip = null;
if (myCurrentTooltip == null) return true;
if (myCurrentTipUi != null) {
RelativePoint target = me != null ? new RelativePoint(me) : null;
boolean isInside = target != null && myCurrentTipUi.isInside(target);
boolean isMovingForward = target != null && myCurrentTipUi.isMovingForward(target);
boolean canAutoHide = myCurrentTooltip.canAutohideOn(new TooltipEvent(me, isInside || isMovingForward, action, event));
boolean implicitMouseMove = me != null &&
(me.getID() == MouseEvent.MOUSE_MOVED ||
me.getID() == MouseEvent.MOUSE_EXITED ||
me.getID() == MouseEvent.MOUSE_ENTERED);
if (!canAutoHide
|| (myCurrentTooltip.isExplicitClose() && implicitMouseMove)
|| (tooltipToShow != null && !tooltipToShow.isHint() && Comparing.equal(myCurrentTooltip, tooltipToShow))) {
if (myHideRunnable != null) {
myHideRunnable = null;
}
return false;
}
}
myHideRunnable = new Runnable() {
@Override
public void run() {
if (myHideRunnable != null) {
hideCurrentNow(animationEnabled);
myHideRunnable = null;
}
}
};
if (me != null && me.getButton() == MouseEvent.NOBUTTON) {
myAlarm.addRequest(myHideRunnable, Registry.intValue("ide.tooltip.autoDismissDeadZone"));
}
else {
myHideRunnable.run();
myHideRunnable = null;
}
return true;
}
public void hideCurrentNow(boolean animationEnabled) {
if (myCurrentTipUi != null) {
myCurrentTipUi.setAnimationEnabled(animationEnabled);
myCurrentTipUi.hide();
myCurrentTooltip.onHidden();
myShowDelay = false;
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
myShowDelay = true;
}
}, Registry.intValue("ide.tooltip.reshowDelay"));
}
myShowRequest = null;
myCurrentTooltip = null;
myCurrentTipUi = null;
myCurrentComponent = null;
myQueuedComponent = null;
myQueuedTooltip = null;
myCurrentEvent = null;
myCurrentTipIsCentered = false;
myX = -1;
myY = -1;
}
private void processEnabled() {
if (myIsEnabled.asBoolean()) {
ToolTipManager.sharedInstance().setEnabled(false);
}
else {
ToolTipManager.sharedInstance().setEnabled(true);
}
}
@Override
public void disposeComponent() {
}
public static IdeTooltipManager getInstance() {
return ApplicationManager.getApplication().getComponent(IdeTooltipManager.class);
}
public void hide(@Nullable IdeTooltip tooltip) {
if (myCurrentTooltip == tooltip || tooltip == null || tooltip == myQueuedTooltip) {
hideCurrent(null, null, null);
}
}
public void cancelAutoHide() {
myHideRunnable = null;
}
public static JEditorPane initPane(@NonNls String text, final HintHint hintHint, @Nullable final JLayeredPane layeredPane) {
return initPane(new Html(text), hintHint, layeredPane);
}
public static JEditorPane initPane(@NonNls Html html, final HintHint hintHint, @Nullable final JLayeredPane layeredPane) {
final Ref<Dimension> prefSize = new Ref<Dimension>(null);
@NonNls String text = HintUtil.prepareHintText(html, hintHint);
final boolean[] prefSizeWasComputed = {false};
final JEditorPane pane = new JEditorPane() {
@Override
public Dimension getPreferredSize() {
if (!prefSizeWasComputed[0] && hintHint.isAwtTooltip()) {
JLayeredPane lp = layeredPane;
if (lp == null) {
JRootPane rootPane = UIUtil.getRootPane(this);
if (rootPane != null) {
lp = rootPane.getLayeredPane();
}
}
Dimension size;
if (lp != null) {
size = lp.getSize();
prefSizeWasComputed[0] = true;
}
else {
size = ScreenUtil.getScreenRectangle(0, 0).getSize();
}
int fitWidth = (int)(size.width * 0.8);
Dimension prefSizeOriginal = super.getPreferredSize();
if (prefSizeOriginal.width > fitWidth) {
setSize(new Dimension(fitWidth, Integer.MAX_VALUE));
Dimension fixedWidthSize = super.getPreferredSize();
Dimension minSize = super.getMinimumSize();
prefSize.set(new Dimension(fitWidth > minSize.width ? fitWidth : minSize.width, fixedWidthSize.height));
}
else {
prefSize.set(new Dimension(prefSizeOriginal));
}
}
Dimension s = prefSize.get() != null ? new Dimension(prefSize.get()) : super.getPreferredSize();
Border b = getBorder();
if (b != null) {
JBInsets.addTo(s, b.getBorderInsets(this));
}
return s;
}
@Override
public void setPreferredSize(Dimension preferredSize) {
super.setPreferredSize(preferredSize);
prefSize.set(preferredSize);
}
};
final HTMLEditorKit.HTMLFactory factory = new HTMLEditorKit.HTMLFactory() {
@Override
public View create(Element elem) {
AttributeSet attrs = elem.getAttributes();
Object elementName = attrs.getAttribute(AbstractDocument.ElementNameAttribute);
Object o = elementName != null ? null : attrs.getAttribute(StyleConstants.NameAttribute);
if (o instanceof HTML.Tag) {
HTML.Tag kind = (HTML.Tag)o;
if (kind == HTML.Tag.HR) {
return new CustomHrView(elem, hintHint.getTextForeground());
}
}
return super.create(elem);
}
};
HTMLEditorKit kit = new HTMLEditorKit() {
@Override
public ViewFactory getViewFactory() {
return factory;
}
};
pane.setEditorKit(kit);
pane.setText(text);
pane.setCaretPosition(0);
pane.setEditable(false);
if (hintHint.isOwnBorderAllowed()) {
setBorder(pane);
setColors(pane);
}
else {
pane.setBorder(null);
}
if (!hintHint.isAwtTooltip()) {
prefSizeWasComputed[0] = true;
}
final boolean opaque = hintHint.isOpaqueAllowed();
pane.setOpaque(opaque);
if (UIUtil.isUnderNimbusLookAndFeel() && !opaque) {
pane.setBackground(UIUtil.TRANSPARENT_COLOR);
}
else {
pane.setBackground(hintHint.getTextBackground());
}
return pane;
}
public static void setColors(JComponent pane) {
pane.setForeground(JBColor.foreground());
pane.setBackground(HintUtil.INFORMATION_COLOR);
pane.setOpaque(true);
}
public static void setBorder(JComponent pane) {
pane.setBorder(
BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(Color.black), BorderFactory.createEmptyBorder(0, 5, 0, 5)));
}
@NotNull
@Override
public String getComponentName() {
return "IDE Tooltip Manager";
}
public boolean isQueuedToShow(IdeTooltip tooltip) {
return Comparing.equal(myQueuedTooltip, tooltip);
}
}
| IDEA-98746 Annoying editor tab tooltips
| platform/platform-impl/src/com/intellij/ide/IdeTooltipManager.java | IDEA-98746 Annoying editor tab tooltips | <ide><path>latform/platform-impl/src/com/intellij/ide/IdeTooltipManager.java
<ide> }
<ide> }
<ide> else if (me.getID() == MouseEvent.MOUSE_PRESSED) {
<del> if (c == myCurrentComponent) {
<del> hideCurrent(me, null, null);
<add> boolean clickOnTooltip = myCurrentTipUi != null && myCurrentTipUi == JBPopupFactory.getInstance().getParentBalloonFor(c);
<add> if (c == myCurrentComponent || clickOnTooltip) {
<add> hideCurrent(me, null, null, null, !clickOnTooltip);
<ide> }
<ide> }
<ide> else if (me.getID() == MouseEvent.MOUSE_DRAGGED) { |
|
Java | bsd-2-clause | 1851f845396a57c9163098c39653d2381c4dd391 | 0 | sixshot626/h2o,sixshot626/h2o,sixshot626/h2o | package h2o.flow.pvm;
import h2o.common.lang.Val;
import h2o.common.util.collection.ListBuilder;
import h2o.flow.pvm.elements.Line;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
public class ExecResult {
private final RunStatus status;
private final List<Line> lines;
private final Val<Object> result;
public ExecResult( Object result , RunStatus status ) {
this.status = status;
this.lines = Collections.emptyList();
this.result = new Val<>(result);
}
public ExecResult( Object result , RunStatus status, Line... lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newList(lines));
this.result = new Val<>(result);
}
public ExecResult( Object result , RunStatus status, Collection<Line> lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newListAndAddAll( lines ) );
this.result = new Val<>(result);
}
public ExecResult(RunStatus status) {
this.status = status;
this.lines = Collections.emptyList();
this.result = Val.empty();
}
public ExecResult(RunStatus status, Line... lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newList(lines));
this.result = Val.empty();
}
public ExecResult(RunStatus status, Collection<Line> lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newListAndAddAll( lines ) );
this.result = Val.empty();
}
public static ExecResult pause( Object result ) {
return new ExecResult( result , RunStatus.PAUSE );
}
public static ExecResult end( Object result ) {
return new ExecResult( result , RunStatus.END );
}
public static ExecResult exception( Object result ) {
return new ExecResult( result , RunStatus.EXCEPTION );
}
public static ExecResult goOn( Object result , Line... lines ) {
return new ExecResult( result , RunStatus.RUNNING , lines );
}
public static ExecResult goOn( Object result , Collection<Line> lines ) {
return new ExecResult( result , RunStatus.RUNNING , lines );
}
public static ExecResult pause() {
return new ExecResult( RunStatus.PAUSE );
}
public static ExecResult end() {
return new ExecResult( RunStatus.END );
}
public static ExecResult exception() {
return new ExecResult( RunStatus.EXCEPTION );
}
public static ExecResult goOn( Line... lines ) {
return new ExecResult( RunStatus.RUNNING , lines );
}
public static ExecResult goOn( Collection<Line> lines ) {
return new ExecResult( RunStatus.RUNNING , lines );
}
public RunStatus getStatus() {
return status;
}
public List<Line> getLines() {
return lines;
}
public Val<Object> getResult() {
return result;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ExecResult{");
sb.append("status=").append(status);
sb.append(", lines=").append(lines);
sb.append(", result=").append(result);
sb.append('}');
return sb.toString();
}
}
| h2o-flow/src/main/java/h2o/flow/pvm/ExecResult.java | package h2o.flow.pvm;
import h2o.common.lang.Val;
import h2o.common.util.collection.ListBuilder;
import h2o.flow.pvm.elements.Line;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
public class ExecResult {
private final RunStatus status;
private final List<Line> lines;
private final Val<Object> result;
public ExecResult( Object result , RunStatus status ) {
this.status = status;
this.lines = Collections.emptyList();
this.result = new Val<>(result);
}
public ExecResult( Object result , RunStatus status, Line... lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newList(lines));
this.result = new Val<>(result);
}
public ExecResult( Object result , RunStatus status, Collection<Line> lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newListAndAddAll( lines ) );
this.result = new Val<>(result);
}
public ExecResult(RunStatus status) {
this.status = status;
this.lines = Collections.emptyList();
this.result = Val.empty();
}
public ExecResult(RunStatus status, Line... lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newList(lines));
this.result = Val.empty();
}
public ExecResult(RunStatus status, Collection<Line> lines ) {
this.status = status;
this.lines = Collections.unmodifiableList(ListBuilder.newListAndAddAll( lines ) );
this.result = Val.empty();
}
public static ExecResult pause() {
return new ExecResult( RunStatus.PAUSE );
}
public static ExecResult end() {
return new ExecResult( RunStatus.END );
}
public static ExecResult exception() {
return new ExecResult( RunStatus.EXCEPTION );
}
public static ExecResult goOn( Line... lines ) {
return new ExecResult( RunStatus.RUNNING , lines );
}
public static ExecResult goOn( Collection<Line> lines ) {
return new ExecResult( RunStatus.RUNNING , lines );
}
public RunStatus getStatus() {
return status;
}
public List<Line> getLines() {
return lines;
}
public Val<Object> getResult() {
return result;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("ExecResult{");
sb.append("status=").append(status);
sb.append(", lines=").append(lines);
sb.append(", result=").append(result);
sb.append('}');
return sb.toString();
}
}
| Add static convenience methods
| h2o-flow/src/main/java/h2o/flow/pvm/ExecResult.java | Add static convenience methods | <ide><path>2o-flow/src/main/java/h2o/flow/pvm/ExecResult.java
<ide> }
<ide>
<ide>
<add>
<add> public static ExecResult pause( Object result ) {
<add> return new ExecResult( result , RunStatus.PAUSE );
<add> }
<add>
<add> public static ExecResult end( Object result ) {
<add> return new ExecResult( result , RunStatus.END );
<add> }
<add>
<add> public static ExecResult exception( Object result ) {
<add> return new ExecResult( result , RunStatus.EXCEPTION );
<add> }
<add>
<add> public static ExecResult goOn( Object result , Line... lines ) {
<add> return new ExecResult( result , RunStatus.RUNNING , lines );
<add> }
<add>
<add> public static ExecResult goOn( Object result , Collection<Line> lines ) {
<add> return new ExecResult( result , RunStatus.RUNNING , lines );
<add> }
<add>
<add>
<ide> public static ExecResult pause() {
<ide> return new ExecResult( RunStatus.PAUSE );
<ide> } |
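A minimal usage sketch of the static factory methods this commit introduces (see the new_contents above). ExecResult, RunStatus, Line and the goOn/end/pause factories are taken from the record; the ExecResultUsageSketch class and its method names are hypothetical illustrations, not part of the h2o-flow library.
import h2o.flow.pvm.ExecResult;
import h2o.flow.pvm.elements.Line;
// Hypothetical helper showing how a node implementation might use the new
// factories instead of the equivalent constructors.
final class ExecResultUsageSketch {
    // Continue along one or more outgoing lines, carrying a result value.
    // Equivalent to: new ExecResult(payload, RunStatus.RUNNING, nextLines)
    static ExecResult continueAlong(Object payload, Line... nextLines) {
        return ExecResult.goOn(payload, nextLines);
    }
    // Finish the flow with a result value.
    // Equivalent to: new ExecResult(payload, RunStatus.END)
    static ExecResult finishWith(Object payload) {
        return ExecResult.end(payload);
    }
    // Suspend the flow without producing a result.
    // Equivalent to: new ExecResult(RunStatus.PAUSE)
    static ExecResult suspend() {
        return ExecResult.pause();
    }
}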
|
Java | apache-2.0 | 70d9e97cfa33a4d768e93ec633ebe762bc426835 | 0 | nikos/edison-microservice,nikos/edison-microservice,nikos/edison-microservice | package de.otto.edison.jobs.repository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.metrics.CounterService;
import org.springframework.scheduling.annotation.Scheduled;
import java.util.List;
/**
* A component that is responsible for cleaning up the job repository.
*
* All JobCleanup strategies are automatically registered and executed every minute.
*
* @author Guido Steinacker
* @since 01.03.15
*/
public class JobRepositoryCleanup {
private static final Logger LOG = LoggerFactory.getLogger(JobRepositoryCleanup.class);
public static final long ONE_MINUTE = 60 * 1000L;
@Autowired
private CounterService counterService;
@Autowired
private JobRepository repository;
@Autowired
private List<JobCleanupStrategy> strategies;
@Scheduled(fixedDelay = ONE_MINUTE)
public void cleanup() {
try {
for (final JobCleanupStrategy strategy : strategies) {
strategy.doCleanUp(repository);
}
} catch (final RuntimeException e) {
LOG.error(e.getMessage(), e);
counterService.increment("counter.jobs.cleanup.errors");
}
}
}
| jobs/src/main/java/de/otto/edison/jobs/repository/JobRepositoryCleanup.java | package de.otto.edison.jobs.repository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.metrics.CounterService;
import org.springframework.scheduling.annotation.Scheduled;
import java.util.List;
/**
* A component that is responsible for cleaning up the job repository.
*
* All JobCleanup strategies are automatically registered and executed every minute.
*
* @author Guido Steinacker
* @since 01.03.15
*/
public class JobRepositoryCleanup {
public static final long ONE_MINUTE = 60 * 1000L;
@Autowired
private CounterService counterService;
@Autowired
private JobRepository repository;
@Autowired
private List<JobCleanupStrategy> strategies;
@Scheduled(fixedDelay = ONE_MINUTE)
public void cleanup() {
try {
for (final JobCleanupStrategy strategy : strategies) {
strategy.doCleanUp(repository);
}
} catch (final RuntimeException e) {
counterService.increment("counter.jobs.cleanup.errors");
}
}
}
| Added logging of exceptions during cleanup of job repositories
| jobs/src/main/java/de/otto/edison/jobs/repository/JobRepositoryCleanup.java | Added logging of exceptions during cleanup of job repositories | <ide><path>obs/src/main/java/de/otto/edison/jobs/repository/JobRepositoryCleanup.java
<ide> package de.otto.edison.jobs.repository;
<ide>
<add>import org.slf4j.Logger;
<add>import org.slf4j.LoggerFactory;
<ide> import org.springframework.beans.factory.annotation.Autowired;
<ide> import org.springframework.boot.actuate.metrics.CounterService;
<ide> import org.springframework.scheduling.annotation.Scheduled;
<ide> * @since 01.03.15
<ide> */
<ide> public class JobRepositoryCleanup {
<add>
<add> private static final Logger LOG = LoggerFactory.getLogger(JobRepositoryCleanup.class);
<ide>
<ide> public static final long ONE_MINUTE = 60 * 1000L;
<ide>
<ide> strategy.doCleanUp(repository);
<ide> }
<ide> } catch (final RuntimeException e) {
<add> LOG.error(e.getMessage(), e);
<ide> counterService.increment("counter.jobs.cleanup.errors");
<ide> }
<ide> } |
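A hedged sketch of a cleanup strategy that could be registered alongside the JobRepositoryCleanup shown in this record, which calls doCleanUp(repository) on every autowired strategy once a minute. Only that call site is visible above; the assumption that JobCleanupStrategy is an interface with a void doCleanUp(JobRepository) method, the NoOpJobCleanupStrategy name, and its empty body are illustrative guesses, not part of the edison-microservice code.
import de.otto.edison.jobs.repository.JobCleanupStrategy;
import de.otto.edison.jobs.repository.JobRepository;
// Hypothetical strategy: JobRepositoryCleanup invokes doCleanUp(repository) on each
// registered strategy, so a new strategy only needs to implement this one callback.
public class NoOpJobCleanupStrategy implements JobCleanupStrategy {
    @Override
    public void doCleanUp(final JobRepository repository) {
        // A real strategy would delete or archive old job entries here; JobRepository's
        // query and removal methods are not shown in this record, so this placeholder
        // intentionally does nothing.
    }
}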
|
Java | apache-2.0 | 3391ec5377b5f2204dab2c14b38893e0e3556628 | 0 | brat000012001/keycloak,vmuzikar/keycloak,pedroigor/keycloak,jpkrohling/keycloak,darranl/keycloak,mposolda/keycloak,reneploetz/keycloak,raehalme/keycloak,vmuzikar/keycloak,raehalme/keycloak,raehalme/keycloak,stianst/keycloak,brat000012001/keycloak,abstractj/keycloak,keycloak/keycloak,vmuzikar/keycloak,jpkrohling/keycloak,srose/keycloak,mposolda/keycloak,ahus1/keycloak,pedroigor/keycloak,pedroigor/keycloak,pedroigor/keycloak,raehalme/keycloak,abstractj/keycloak,thomasdarimont/keycloak,jpkrohling/keycloak,ahus1/keycloak,hmlnarik/keycloak,darranl/keycloak,thomasdarimont/keycloak,ssilvert/keycloak,vmuzikar/keycloak,raehalme/keycloak,keycloak/keycloak,stianst/keycloak,srose/keycloak,keycloak/keycloak,ahus1/keycloak,stianst/keycloak,brat000012001/keycloak,hmlnarik/keycloak,mposolda/keycloak,pedroigor/keycloak,stianst/keycloak,abstractj/keycloak,mhajas/keycloak,hmlnarik/keycloak,ssilvert/keycloak,thomasdarimont/keycloak,mhajas/keycloak,keycloak/keycloak,mposolda/keycloak,thomasdarimont/keycloak,jpkrohling/keycloak,pedroigor/keycloak,mhajas/keycloak,vmuzikar/keycloak,srose/keycloak,ssilvert/keycloak,stianst/keycloak,brat000012001/keycloak,darranl/keycloak,ssilvert/keycloak,ahus1/keycloak,ssilvert/keycloak,mposolda/keycloak,hmlnarik/keycloak,hmlnarik/keycloak,reneploetz/keycloak,abstractj/keycloak,reneploetz/keycloak,raehalme/keycloak,jpkrohling/keycloak,abstractj/keycloak,darranl/keycloak,reneploetz/keycloak,reneploetz/keycloak,srose/keycloak,thomasdarimont/keycloak,vmuzikar/keycloak,ahus1/keycloak,srose/keycloak,mhajas/keycloak,mposolda/keycloak,ahus1/keycloak,thomasdarimont/keycloak,brat000012001/keycloak,mhajas/keycloak,keycloak/keycloak,hmlnarik/keycloak | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.adapters.servlet;
import org.keycloak.adapters.spi.AdapterSessionStore;
import org.keycloak.adapters.spi.HttpFacade;
import org.keycloak.adapters.spi.KeycloakAccount;
import org.keycloak.common.util.Encode;
import org.keycloak.common.util.MultivaluedHashMap;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpSession;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.Principal;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:[email protected]">Bill Burke</a>
* @version $Revision: 1 $
*/
public class FilterSessionStore implements AdapterSessionStore {
public static final String REDIRECT_URI = "__REDIRECT_URI";
public static final String SAVED_METHOD = "__SAVED_METHOD";
public static final String SAVED_HEADERS = "__SAVED_HEADERS";
public static final String SAVED_BODY = "__SAVED_BODY";
protected final HttpServletRequest request;
protected final HttpFacade facade;
protected final int maxBuffer;
protected byte[] restoredBuffer = null;
protected boolean needRequestRestore;
public FilterSessionStore(HttpServletRequest request, HttpFacade facade, int maxBuffer) {
this.request = request;
this.facade = facade;
this.maxBuffer = maxBuffer;
}
public void clearSavedRequest(HttpSession session) {
session.removeAttribute(REDIRECT_URI);
session.removeAttribute(SAVED_METHOD);
session.removeAttribute(SAVED_HEADERS);
session.removeAttribute(SAVED_BODY);
}
public void servletRequestLogout() {
}
public static String getCharsetFromContentType(String contentType) {
if (contentType == null)
return (null);
int start = contentType.indexOf("charset=");
if (start < 0)
return (null);
String encoding = contentType.substring(start + 8);
int end = encoding.indexOf(';');
if (end >= 0)
encoding = encoding.substring(0, end);
encoding = encoding.trim();
if ((encoding.length() > 2) && (encoding.startsWith("\""))
&& (encoding.endsWith("\"")))
encoding = encoding.substring(1, encoding.length() - 1);
return (encoding.trim());
}
public HttpServletRequestWrapper buildWrapper(HttpSession session, final KeycloakAccount account) {
if (needRequestRestore) {
final String method = (String)session.getAttribute(SAVED_METHOD);
final byte[] body = (byte[])session.getAttribute(SAVED_BODY);
final MultivaluedHashMap<String, String> headers = (MultivaluedHashMap<String, String>)session.getAttribute(SAVED_HEADERS);
clearSavedRequest(session);
HttpServletRequestWrapper wrapper = new HttpServletRequestWrapper(request) {
protected MultivaluedHashMap<String, String> parameters;
MultivaluedHashMap<String, String> getParams() {
if (parameters != null) return parameters;
if (body == null) return new MultivaluedHashMap<String, String>();
String contentType = getContentType();
if (contentType != null && contentType.toLowerCase().startsWith("application/x-www-form-urlencoded")) {
ByteArrayInputStream is = new ByteArrayInputStream(body);
try {
parameters = parseForm(is);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return parameters;
}
@Override
public boolean isUserInRole(String role) {
return account.getRoles().contains(role);
}
@Override
public Principal getUserPrincipal() {
return account.getPrincipal();
}
@Override
public String getMethod() {
if (needRequestRestore) {
return method;
} else {
return super.getMethod();
}
}
@Override
public String getHeader(String name) {
if (needRequestRestore && headers != null) {
return headers.getFirst(name.toLowerCase());
}
return super.getHeader(name);
}
@Override
public Enumeration<String> getHeaders(String name) {
if (needRequestRestore && headers != null) {
List<String> values = headers.getList(name.toLowerCase());
if (values == null) return Collections.emptyEnumeration();
else return Collections.enumeration(values);
}
return super.getHeaders(name);
}
@Override
public Enumeration<String> getHeaderNames() {
if (needRequestRestore && headers != null) {
return Collections.enumeration(headers.keySet());
}
return super.getHeaderNames();
}
@Override
public ServletInputStream getInputStream() throws IOException {
if (needRequestRestore && body != null) {
final ByteArrayInputStream is = new ByteArrayInputStream(body);
return new ServletInputStream() {
@Override
public int read() throws IOException {
return is.read();
}
};
}
return super.getInputStream();
}
@Override
public void logout() throws ServletException {
servletRequestLogout();
}
@Override
public long getDateHeader(String name) {
if (!needRequestRestore) return super.getDateHeader(name);
return -1;
}
@Override
public int getIntHeader(String name) {
if (!needRequestRestore) return super.getIntHeader(name);
String value = getHeader(name);
if (value == null) return -1;
return Integer.valueOf(value);
}
@Override
public String[] getParameterValues(String name) {
if (!needRequestRestore) return super.getParameterValues(name);
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return super.getParameterValues(name);
}
String[] values = request.getParameterValues(name);
List<String> list = new LinkedList<>();
if (values != null) {
for (String val : values) list.add(val);
}
List<String> vals = formParams.get(name);
if (vals != null) list.addAll(vals);
return list.toArray(new String[list.size()]);
}
@Override
public Enumeration<String> getParameterNames() {
if (!needRequestRestore) return super.getParameterNames();
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return super.getParameterNames();
}
Set<String> names = new HashSet<>();
Enumeration<String> qnames = super.getParameterNames();
while (qnames.hasMoreElements()) names.add(qnames.nextElement());
names.addAll(formParams.keySet());
return Collections.enumeration(names);
}
@Override
public Map<String, String[]> getParameterMap() {
if (!needRequestRestore) return super.getParameterMap();
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return super.getParameterMap();
}
Map<String, String[]> map = new HashMap<>();
Enumeration<String> names = getParameterNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
String[] values = getParameterValues(name);
if (values != null) {
map.put(name, values);
}
}
return map;
}
@Override
public String getParameter(String name) {
if (!needRequestRestore) return super.getParameter(name);
String param = super.getParameter(name);
if (param != null) return param;
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return null;
}
return formParams.getFirst(name);
}
@Override
public BufferedReader getReader() throws IOException {
if (!needRequestRestore) return super.getReader();
return new BufferedReader(new InputStreamReader(getInputStream()));
}
@Override
public int getContentLength() {
if (!needRequestRestore) return super.getContentLength();
String header = getHeader("content-length");
if (header == null) return -1;
return Integer.valueOf(header);
}
@Override
public String getContentType() {
if (!needRequestRestore) return super.getContentType();
return getHeader("content-type");
}
@Override
public String getCharacterEncoding() {
if (!needRequestRestore) return super.getCharacterEncoding();
return getCharsetFromContentType(getContentType());
}
};
return wrapper;
} else {
return new HttpServletRequestWrapper(request) {
@Override
public boolean isUserInRole(String role) {
return account.getRoles().contains(role);
}
@Override
public Principal getUserPrincipal() {
if (account == null) return null;
return account.getPrincipal();
}
@Override
public void logout() throws ServletException {
servletRequestLogout();
}
};
}
}
public String getRedirectUri() {
HttpSession session = request.getSession(true);
return (String)session.getAttribute(REDIRECT_URI);
}
@Override
public boolean restoreRequest() {
HttpSession session = request.getSession(false);
if (session == null) return false;
return session.getAttribute(REDIRECT_URI) != null;
}
public static MultivaluedHashMap<String, String> parseForm(InputStream entityStream)
throws IOException
{
char[] buffer = new char[100];
StringBuffer buf = new StringBuffer();
BufferedReader reader = new BufferedReader(new InputStreamReader(entityStream));
int wasRead = 0;
do
{
wasRead = reader.read(buffer, 0, 100);
if (wasRead > 0) buf.append(buffer, 0, wasRead);
} while (wasRead > -1);
String form = buf.toString();
MultivaluedHashMap<String, String> formData = new MultivaluedHashMap<String, String>();
if ("".equals(form)) return formData;
String[] params = form.split("&");
for (String param : params)
{
if (param.indexOf('=') >= 0)
{
String[] nv = param.split("=");
String val = nv.length > 1 ? nv[1] : "";
formData.add(Encode.decode(nv[0]), Encode.decode(val));
}
else
{
formData.add(Encode.decode(param), "");
}
}
return formData;
}
@Override
public void saveRequest() {
HttpSession session = request.getSession(true);
session.setAttribute(REDIRECT_URI, facade.getRequest().getURI());
session.setAttribute(SAVED_METHOD, request.getMethod());
MultivaluedHashMap<String, String> headers = new MultivaluedHashMap<>();
Enumeration<String> names = request.getHeaderNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
Enumeration<String> values = request.getHeaders(name);
while (values.hasMoreElements()) {
headers.add(name.toLowerCase(), values.nextElement());
}
}
session.setAttribute(SAVED_HEADERS, headers);
if (request.getMethod().equalsIgnoreCase("GET")) {
return;
}
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int bytesRead;
int totalRead = 0;
try {
InputStream is = request.getInputStream();
while ( (bytesRead = is.read(buffer) ) >= 0) {
os.write(buffer);
totalRead += bytesRead;
if (totalRead > maxBuffer) {
throw new RuntimeException("max buffer reached on a saved request");
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
byte[] body = os.toByteArray();
// Only save the request body if there is something to save
if (body.length > 0) {
session.setAttribute(SAVED_BODY, body);
}
}
}
| adapters/spi/servlet-adapter-spi/src/main/java/org/keycloak/adapters/servlet/FilterSessionStore.java | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.adapters.servlet;
import org.keycloak.adapters.spi.AdapterSessionStore;
import org.keycloak.adapters.spi.HttpFacade;
import org.keycloak.adapters.spi.KeycloakAccount;
import org.keycloak.common.util.Encode;
import org.keycloak.common.util.MultivaluedHashMap;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpSession;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.Principal;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author <a href="mailto:[email protected]">Bill Burke</a>
* @version $Revision: 1 $
*/
public class FilterSessionStore implements AdapterSessionStore {
public static final String REDIRECT_URI = "__REDIRECT_URI";
public static final String SAVED_METHOD = "__SAVED_METHOD";
public static final String SAVED_HEADERS = "__SAVED_HEADERS";
public static final String SAVED_BODY = "__SAVED_BODY";
protected final HttpServletRequest request;
protected final HttpFacade facade;
protected final int maxBuffer;
protected byte[] restoredBuffer = null;
protected boolean needRequestRestore;
public FilterSessionStore(HttpServletRequest request, HttpFacade facade, int maxBuffer) {
this.request = request;
this.facade = facade;
this.maxBuffer = maxBuffer;
}
public void clearSavedRequest(HttpSession session) {
session.removeAttribute(REDIRECT_URI);
session.removeAttribute(SAVED_METHOD);
session.removeAttribute(SAVED_HEADERS);
session.removeAttribute(SAVED_BODY);
}
public void servletRequestLogout() {
}
public static String getCharsetFromContentType(String contentType) {
if (contentType == null)
return (null);
int start = contentType.indexOf("charset=");
if (start < 0)
return (null);
String encoding = contentType.substring(start + 8);
int end = encoding.indexOf(';');
if (end >= 0)
encoding = encoding.substring(0, end);
encoding = encoding.trim();
if ((encoding.length() > 2) && (encoding.startsWith("\""))
&& (encoding.endsWith("\"")))
encoding = encoding.substring(1, encoding.length() - 1);
return (encoding.trim());
}
public HttpServletRequestWrapper buildWrapper(HttpSession session, final KeycloakAccount account) {
if (needRequestRestore) {
final String method = (String)session.getAttribute(SAVED_METHOD);
final byte[] body = (byte[])session.getAttribute(SAVED_BODY);
final MultivaluedHashMap<String, String> headers = (MultivaluedHashMap<String, String>)session.getAttribute(SAVED_HEADERS);
clearSavedRequest(session);
HttpServletRequestWrapper wrapper = new HttpServletRequestWrapper(request) {
protected MultivaluedHashMap<String, String> parameters;
MultivaluedHashMap<String, String> getParams() {
if (parameters != null) return parameters;
if (body == null) return new MultivaluedHashMap<String, String>();
String contentType = getContentType();
contentType = contentType.toLowerCase();
if (contentType.startsWith("application/x-www-form-urlencoded")) {
ByteArrayInputStream is = new ByteArrayInputStream(body);
try {
parameters = parseForm(is);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
return parameters;
}
@Override
public boolean isUserInRole(String role) {
return account.getRoles().contains(role);
}
@Override
public Principal getUserPrincipal() {
return account.getPrincipal();
}
@Override
public String getMethod() {
if (needRequestRestore) {
return method;
} else {
return super.getMethod();
}
}
@Override
public String getHeader(String name) {
if (needRequestRestore && headers != null) {
return headers.getFirst(name.toLowerCase());
}
return super.getHeader(name);
}
@Override
public Enumeration<String> getHeaders(String name) {
if (needRequestRestore && headers != null) {
List<String> values = headers.getList(name.toLowerCase());
if (values == null) return Collections.emptyEnumeration();
else return Collections.enumeration(values);
}
return super.getHeaders(name);
}
@Override
public Enumeration<String> getHeaderNames() {
if (needRequestRestore && headers != null) {
return Collections.enumeration(headers.keySet());
}
return super.getHeaderNames();
}
@Override
public ServletInputStream getInputStream() throws IOException {
if (needRequestRestore && body != null) {
final ByteArrayInputStream is = new ByteArrayInputStream(body);
return new ServletInputStream() {
@Override
public int read() throws IOException {
return is.read();
}
};
}
return super.getInputStream();
}
@Override
public void logout() throws ServletException {
servletRequestLogout();
}
@Override
public long getDateHeader(String name) {
if (!needRequestRestore) return super.getDateHeader(name);
return -1;
}
@Override
public int getIntHeader(String name) {
if (!needRequestRestore) return super.getIntHeader(name);
String value = getHeader(name);
if (value == null) return -1;
return Integer.valueOf(value);
}
@Override
public String[] getParameterValues(String name) {
if (!needRequestRestore) return super.getParameterValues(name);
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return super.getParameterValues(name);
}
String[] values = request.getParameterValues(name);
List<String> list = new LinkedList<>();
if (values != null) {
for (String val : values) list.add(val);
}
List<String> vals = formParams.get(name);
if (vals != null) list.addAll(vals);
return list.toArray(new String[list.size()]);
}
@Override
public Enumeration<String> getParameterNames() {
if (!needRequestRestore) return super.getParameterNames();
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return super.getParameterNames();
}
Set<String> names = new HashSet<>();
Enumeration<String> qnames = super.getParameterNames();
while (qnames.hasMoreElements()) names.add(qnames.nextElement());
names.addAll(formParams.keySet());
return Collections.enumeration(names);
}
@Override
public Map<String, String[]> getParameterMap() {
if (!needRequestRestore) return super.getParameterMap();
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return super.getParameterMap();
}
Map<String, String[]> map = new HashMap<>();
Enumeration<String> names = getParameterNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
String[] values = getParameterValues(name);
if (values != null) {
map.put(name, values);
}
}
return map;
}
@Override
public String getParameter(String name) {
if (!needRequestRestore) return super.getParameter(name);
String param = super.getParameter(name);
if (param != null) return param;
MultivaluedHashMap<String, String> formParams = getParams();
if (formParams == null) {
return null;
}
return formParams.getFirst(name);
}
@Override
public BufferedReader getReader() throws IOException {
if (!needRequestRestore) return super.getReader();
return new BufferedReader(new InputStreamReader(getInputStream()));
}
@Override
public int getContentLength() {
if (!needRequestRestore) return super.getContentLength();
String header = getHeader("content-length");
if (header == null) return -1;
return Integer.valueOf(header);
}
@Override
public String getContentType() {
if (!needRequestRestore) return super.getContentType();
return getHeader("content-type");
}
@Override
public String getCharacterEncoding() {
if (!needRequestRestore) return super.getCharacterEncoding();
return getCharsetFromContentType(getContentType());
}
};
return wrapper;
} else {
return new HttpServletRequestWrapper(request) {
@Override
public boolean isUserInRole(String role) {
return account.getRoles().contains(role);
}
@Override
public Principal getUserPrincipal() {
if (account == null) return null;
return account.getPrincipal();
}
@Override
public void logout() throws ServletException {
servletRequestLogout();
}
};
}
}
public String getRedirectUri() {
HttpSession session = request.getSession(true);
return (String)session.getAttribute(REDIRECT_URI);
}
@Override
public boolean restoreRequest() {
HttpSession session = request.getSession(false);
if (session == null) return false;
return session.getAttribute(REDIRECT_URI) != null;
}
public static MultivaluedHashMap<String, String> parseForm(InputStream entityStream)
throws IOException
{
char[] buffer = new char[100];
StringBuffer buf = new StringBuffer();
BufferedReader reader = new BufferedReader(new InputStreamReader(entityStream));
int wasRead = 0;
do
{
wasRead = reader.read(buffer, 0, 100);
if (wasRead > 0) buf.append(buffer, 0, wasRead);
} while (wasRead > -1);
String form = buf.toString();
MultivaluedHashMap<String, String> formData = new MultivaluedHashMap<String, String>();
if ("".equals(form)) return formData;
String[] params = form.split("&");
for (String param : params)
{
if (param.indexOf('=') >= 0)
{
String[] nv = param.split("=");
String val = nv.length > 1 ? nv[1] : "";
formData.add(Encode.decode(nv[0]), Encode.decode(val));
}
else
{
formData.add(Encode.decode(param), "");
}
}
return formData;
}
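    // Illustration (not part of the original source): assuming Encode.decode performs
    // standard URL decoding, a saved body such as "a=1&b=&c%20d=x%20y" is parsed into
    //   { "a" -> ["1"], "b" -> [""], "c d" -> ["x y"] }
    // i.e. every parameter is kept, including parameters whose value is empty.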
@Override
public void saveRequest() {
HttpSession session = request.getSession(true);
session.setAttribute(REDIRECT_URI, facade.getRequest().getURI());
session.setAttribute(SAVED_METHOD, request.getMethod());
MultivaluedHashMap<String, String> headers = new MultivaluedHashMap<>();
Enumeration<String> names = request.getHeaderNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
Enumeration<String> values = request.getHeaders(name);
while (values.hasMoreElements()) {
headers.add(name.toLowerCase(), values.nextElement());
}
}
session.setAttribute(SAVED_HEADERS, headers);
if (request.getMethod().equalsIgnoreCase("GET")) {
return;
}
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int bytesRead;
int totalRead = 0;
try {
InputStream is = request.getInputStream();
while ( (bytesRead = is.read(buffer) ) >= 0) {
                os.write(buffer, 0, bytesRead); // write only the bytes actually read
totalRead += bytesRead;
if (totalRead > maxBuffer) {
throw new RuntimeException("max buffer reached on a saved request");
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
byte[] body = os.toByteArray();
// Only save the request body if there is something to save
if (body.length > 0) {
session.setAttribute(SAVED_BODY, body);
}
}
}
| check if content-type is null when restoring request
| adapters/spi/servlet-adapter-spi/src/main/java/org/keycloak/adapters/servlet/FilterSessionStore.java | check if content-type is null when restoring request | <ide><path>dapters/spi/servlet-adapter-spi/src/main/java/org/keycloak/adapters/servlet/FilterSessionStore.java
<ide> if (body == null) return new MultivaluedHashMap<String, String>();
<ide>
<ide> String contentType = getContentType();
<del> contentType = contentType.toLowerCase();
<del> if (contentType.startsWith("application/x-www-form-urlencoded")) {
<add> if (contentType != null && contentType.toLowerCase().startsWith("application/x-www-form-urlencoded")) {
<ide> ByteArrayInputStream is = new ByteArrayInputStream(body);
<ide> try {
<ide> parameters = parseForm(is); |
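The hunk above guards against saved requests that carry no Content-Type header, so restoring them no longer risks a NullPointerException. A minimal, self-contained sketch of the same guard (class and method names are illustrative, not taken from Keycloak):

import java.util.Locale;

public class ContentTypeGuardSketch {

    // True only when a Content-Type is present and identifies a form-encoded body;
    // a missing header simply yields false instead of throwing.
    static boolean isFormUrlEncoded(String contentType) {
        return contentType != null
                && contentType.toLowerCase(Locale.ROOT).startsWith("application/x-www-form-urlencoded");
    }

    public static void main(String[] args) {
        System.out.println(isFormUrlEncoded("application/x-www-form-urlencoded; charset=UTF-8")); // true
        System.out.println(isFormUrlEncoded(null));                                               // false
    }
}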
|
Java | agpl-3.0 | 23831d7df6c2f8e6a324c6603662d04e1fe1d59d | 0 | ozwillo/ozwillo-kernel,ozwillo/ozwillo-kernel,ozwillo/ozwillo-kernel | package oasis.jongo.accounts;
import javax.inject.Inject;
import org.jongo.Jongo;
import org.jongo.MongoCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.WriteResult;
import oasis.model.InvalidVersionException;
import oasis.model.accounts.Account;
import oasis.model.accounts.AccountRepository;
import oasis.model.accounts.AgentAccount;
import oasis.model.accounts.UserAccount;
public class JongoAccountRepository implements AccountRepository {
private static final Logger logger = LoggerFactory.getLogger(JongoAccountRepository.class);
private final Jongo jongo;
@Inject
JongoAccountRepository(Jongo jongo) {
this.jongo = jongo;
}
protected MongoCollection getAccountCollection() {
return jongo.getCollection("account");
}
@Override
public Account getAccount(String id) {
return this.getAccountCollection()
.findOne("{id:#}", id)
.projection("{tokens: 0, authorizedScopes: 0}")
.as(Account.class);
}
@Override
public UserAccount getUserAccountByEmail(String email) {
return this.getAccountCollection()
.findOne("{emailAddress:#}", email)
.projection("{tokens: 0, authorizedScopes: 0}")
.as(UserAccount.class);
}
@Override
public UserAccount getUserAccountById(String id) {
return this.getAccountCollection()
.findOne("{id:#}", id)
.projection("{tokens: 0, authorizedScopes: 0}")
.as(UserAccount.class);
}
@Override
public AgentAccount getAgentAccountById(String id) {
return getAccountCollection()
.findOne("{id:#}", id)
.projection("{tokens: 0, authorizedScopes: 0}")
.as(AgentAccount.class);
}
@Override
public AgentAccount createAgentAccount(String organizationId, AgentAccount agent) {
agent.setModified(System.currentTimeMillis());
agent.setOrganizationId(organizationId);
getAccountCollection().insert(agent);
return agent;
}
@Override
public boolean deleteAgentAccount(String agentId, long[] versions) throws InvalidVersionException {
WriteResult wr = getAccountCollection().remove("{id: #, modified: { $in: # } }", agentId, versions);
if (wr.getN() == 0) {
if (getAccountCollection().count("{ id: # }", agentId) != 0) {
throw new InvalidVersionException("agentaccount", agentId);
}
return false;
}
return true;
}
@Override
public void deleteAgentAccountsFromOrganization(String organizationId) {
getAccountCollection().remove("{ organizationId: # }", organizationId);
}
@Override
public Iterable<AgentAccount> getAgentsForOrganization(String organizationId, int start, int limit) {
return getAccountCollection()
.find("{ organizationId: # }", organizationId)
.projection("{tokens: 0, authorizedScopes: 0}")
.skip(start)
.limit(limit)
.as(AgentAccount.class);
}
@Override
public AgentAccount findAndRemove(String agentId, long[] versions) throws InvalidVersionException {
AgentAccount res = getAccountCollection()
.findAndModify("{id: #, modified: { $in: # } }", agentId, versions)
.projection("{tokens: 0, authorizedScopes: 0}")
.remove()
.as(AgentAccount.class);
if (res == null) {
if (getAccountCollection().count("{ id: # }", agentId) != 0) {
throw new InvalidVersionException("agentaccount", agentId);
}
}
return res;
}
@Override
public void updatePassword(String accountId, String passwordHash, String passwordSalt) {
WriteResult writeResult = getAccountCollection()
.update("{ id: # }", accountId)
.with("{ $set: { password: #, passwordSalt: # } }", passwordHash, passwordSalt);
if (writeResult.getN() > 1) {
logger.error("More than one account provider with id: {}", accountId);
} else if (writeResult.getN() < 1) {
logger.error("The account {} doesn't exist.", accountId);
}
}
}
| oasis-webapp/src/main/java/oasis/jongo/accounts/JongoAccountRepository.java | package oasis.jongo.accounts;
import javax.inject.Inject;
import org.jongo.Jongo;
import org.jongo.MongoCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.WriteResult;
import oasis.model.InvalidVersionException;
import oasis.model.accounts.Account;
import oasis.model.accounts.AccountRepository;
import oasis.model.accounts.AgentAccount;
import oasis.model.accounts.UserAccount;
public class JongoAccountRepository implements AccountRepository {
private static final Logger logger = LoggerFactory.getLogger(JongoAccountRepository.class);
private final Jongo jongo;
@Inject
JongoAccountRepository(Jongo jongo) {
this.jongo = jongo;
}
protected MongoCollection getAccountCollection() {
return jongo.getCollection("account");
}
@Override
public Account getAccount(String id) {
return this.getAccountCollection().findOne("{id:#}", id).as(Account.class);
}
@Override
public UserAccount getUserAccountByEmail(String email) {
return this.getAccountCollection().findOne("{emailAddress:#}", email).as(UserAccount.class);
}
@Override
public UserAccount getUserAccountById(String id) {
return this.getAccountCollection().findOne("{id:#}", id).as(UserAccount.class);
}
@Override
public AgentAccount getAgentAccountById(String id) {
return getAccountCollection().findOne("{id:#}", id).as(AgentAccount.class);
}
@Override
public AgentAccount createAgentAccount(String organizationId, AgentAccount agent) {
agent.setModified(System.currentTimeMillis());
agent.setOrganizationId(organizationId);
getAccountCollection().insert(agent);
return agent;
}
@Override
public boolean deleteAgentAccount(String agentId, long[] versions) throws InvalidVersionException {
WriteResult wr = getAccountCollection().remove("{id: #, modified: { $in: # } }", agentId, versions);
if (wr.getN() == 0) {
if (getAccountCollection().count("{ id: # }", agentId) != 0) {
throw new InvalidVersionException("agentaccount", agentId);
}
return false;
}
return true;
}
@Override
public void deleteAgentAccountsFromOrganization(String organizationId) {
getAccountCollection().remove("{ organizationId: # }", organizationId);
}
@Override
public Iterable<AgentAccount> getAgentsForOrganization(String organizationId, int start, int limit) {
return getAccountCollection()
.find("{ organizationId: # }", organizationId)
.skip(start)
.limit(limit)
.as(AgentAccount.class);
}
@Override
public AgentAccount findAndRemove(String agentId, long[] versions) throws InvalidVersionException {
AgentAccount res = getAccountCollection()
.findAndModify("{id: #, modified: { $in: # } }", agentId, versions)
.remove()
.as(AgentAccount.class);
if (res == null) {
if (getAccountCollection().count("{ id: # }", agentId) != 0) {
throw new InvalidVersionException("agentaccount", agentId);
}
}
return res;
}
@Override
public void updatePassword(String accountId, String passwordHash, String passwordSalt) {
WriteResult writeResult = getAccountCollection()
.update("{ id: # }", accountId)
.with("{ $set: { password: #, passwordSalt: # } }", passwordHash, passwordSalt);
if (writeResult.getN() > 1) {
logger.error("More than one account provider with id: {}", accountId);
} else if (writeResult.getN() < 1) {
logger.error("The account {} doesn't exist.", accountId);
}
}
}
| Make sure we never retrieve tokens and authorized scopes within accounts
Change-Id: I166579305d867ca5f2fa94efa447ed3378d7a5b0
| oasis-webapp/src/main/java/oasis/jongo/accounts/JongoAccountRepository.java | Make sure we never retrieve tokens and authorized scopes within accounts | <ide><path>asis-webapp/src/main/java/oasis/jongo/accounts/JongoAccountRepository.java
<ide>
<ide> @Override
<ide> public Account getAccount(String id) {
<del> return this.getAccountCollection().findOne("{id:#}", id).as(Account.class);
<add> return this.getAccountCollection()
<add> .findOne("{id:#}", id)
<add> .projection("{tokens: 0, authorizedScopes: 0}")
<add> .as(Account.class);
<ide> }
<ide>
<ide> @Override
<ide> public UserAccount getUserAccountByEmail(String email) {
<del> return this.getAccountCollection().findOne("{emailAddress:#}", email).as(UserAccount.class);
<add> return this.getAccountCollection()
<add> .findOne("{emailAddress:#}", email)
<add> .projection("{tokens: 0, authorizedScopes: 0}")
<add> .as(UserAccount.class);
<ide> }
<ide>
<ide> @Override
<ide> public UserAccount getUserAccountById(String id) {
<del> return this.getAccountCollection().findOne("{id:#}", id).as(UserAccount.class);
<add> return this.getAccountCollection()
<add> .findOne("{id:#}", id)
<add> .projection("{tokens: 0, authorizedScopes: 0}")
<add> .as(UserAccount.class);
<ide> }
<ide>
<ide> @Override
<ide> public AgentAccount getAgentAccountById(String id) {
<del> return getAccountCollection().findOne("{id:#}", id).as(AgentAccount.class);
<add> return getAccountCollection()
<add> .findOne("{id:#}", id)
<add> .projection("{tokens: 0, authorizedScopes: 0}")
<add> .as(AgentAccount.class);
<ide> }
<ide>
<ide> @Override
<ide> public Iterable<AgentAccount> getAgentsForOrganization(String organizationId, int start, int limit) {
<ide> return getAccountCollection()
<ide> .find("{ organizationId: # }", organizationId)
<add> .projection("{tokens: 0, authorizedScopes: 0}")
<ide> .skip(start)
<ide> .limit(limit)
<ide> .as(AgentAccount.class);
<ide> public AgentAccount findAndRemove(String agentId, long[] versions) throws InvalidVersionException {
<ide> AgentAccount res = getAccountCollection()
<ide> .findAndModify("{id: #, modified: { $in: # } }", agentId, versions)
<add> .projection("{tokens: 0, authorizedScopes: 0}")
<ide> .remove()
<ide> .as(AgentAccount.class);
<ide> if (res == null) { |
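The projections added above ask MongoDB itself to drop the tokens and authorizedScopes fields, so they never reach the application. A minimal sketch of the same Jongo pattern outside the repository class (method and variable names are illustrative; the collection name "account" and the projection string come from the code above):

import java.util.Map;

import org.jongo.Jongo;
import org.jongo.MongoCollection;

class AccountProjectionSketch {

    // Fetches one account document with the two sensitive fields excluded by the
    // database ("0" in a MongoDB projection means "omit this field").
    static Map<?, ?> findAccountWithoutSecrets(Jongo jongo, String accountId) {
        MongoCollection accounts = jongo.getCollection("account");
        return accounts
                .findOne("{id: #}", accountId)
                .projection("{tokens: 0, authorizedScopes: 0}")
                .as(Map.class);
    }
}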
|
JavaScript | mit | d6c6dbf3a41e6efb4dc57c73d1ca8a12d196abd5 | 0 | exponentjs/exp | import inquirerAsync from 'inquirer-async';
import {
Api,
Exp,
} from 'xdl';
import _ from 'lodash';
import log from '../log';
async function action(projectDir, options) {
let validatedOptions = {};
let templateType;
let questions = [];
if (options.projectName) {
validatedOptions.name = options.projectName;
} else {
questions.push({
type: 'input',
name: 'name',
message: 'Project name',
validate(val) {
// TODO: Validate
return val.length > 0;
},
});
}
if (options.projectType) {
templateType = options.projectType;
} else {
let versions = await Api.versionsAsync();
let templateIds = _.map(versions.templates, (template) => `"${template.id}"`);
questions.push({
type: 'input',
name: 'type',
message: `Project type. Options are: ${templateIds.join(', ')}`,
validate(val) {
for (let i = 0; i < versions.templates.length; i++) {
if (versions.templates[i].id === val) {
return true;
}
}
return false;
},
});
}
if (questions.length > 0) {
var answers = await inquirerAsync.promptAsync(questions);
if (answers.name) {
validatedOptions.name = answers.name;
}
if (answers.type) {
templateType = answers.type;
}
}
let root = await Exp.createNewExpAsync(templateType, projectDir, {}, validatedOptions);
log(`Your project is ready at ${root}. Use "exp start ${root}" to get started.`);
}
export default (program) => {
program
.command('init [project-dir]')
.alias('i')
.description('Initializes a directory with an example project. Run it without any options and you will be prompted for the name and type.')
.option('-n, --projectName [name]', 'Specify a name for the new project')
.option('-t, --projectType [type]', 'Specify what type of template to use. Run without this option to see all choices.')
.asyncActionProjectDir(action, true); // pass true to skip validation
};
| src/commands/init.js | import inquirerAsync from 'inquirer-async';
import {
Api,
Exp,
} from 'xdl';
import _ from 'lodash';
import log from '../log';
async function action(projectDir, options) {
let validatedOptions = {};
let templateType;
let questions = [];
if (options.projectName) {
validatedOptions.name = options.projectName;
} else {
questions.push({
type: 'input',
name: 'name',
message: 'Project name',
validate(val) {
// TODO: Validate
return val.length > 0;
},
});
}
if (options.projectType) {
templateType = options.projectType;
} else {
let versions = await Api.versionsAsync();
let templateIds = _.map(versions.templates, (template) => `"${template.id}"`);
questions.push({
type: 'input',
name: 'type',
message: `Project type. Options are: ${templateIds.join(', ')}`,
validate(val) {
for (let i = 0; i < versions.templates.length; i++) {
if (versions.templates[i].id === val) {
return true;
}
}
return false;
},
});
}
if (questions.length > 0) {
var answers = await inquirerAsync.promptAsync(questions);
if (answers.name) {
validatedOptions.name = answers.name;
}
if (answers.type) {
templateType = answers.type;
}
}
let root = await Exp.createNewExpAsync(templateType, projectDir, {}, validatedOptions);
log(`Your project is ready at ${root}. Use "exp start ${root}" to get started.`);
}
export default (program) => {
program
.command('init [project-dir]')
.alias('i')
.description('Initializes a directory with an example project')
.option('-n, --projectName [name]', 'Specify a name for the new project')
.option('-t, --projectType [type]', 'Specify what type of template to use. Run without this options to see all choices.')
.asyncActionProjectDir(action, true); // pass true to skip validation
};
| Update exp init help entry
fbshipit-source-id: 5b50882
| src/commands/init.js | Update exp init help entry | <ide><path>rc/commands/init.js
<ide> program
<ide> .command('init [project-dir]')
<ide> .alias('i')
<del> .description('Initializes a directory with an example project')
<add> .description('Initializes a directory with an example project. Run it without any options and you will be prompted for the name and type.')
<ide> .option('-n, --projectName [name]', 'Specify a name for the new project')
<del> .option('-t, --projectType [type]', 'Specify what type of template to use. Run without this options to see all choices.')
<add> .option('-t, --projectType [type]', 'Specify what type of template to use. Run without this option to see all choices.')
<ide> .asyncActionProjectDir(action, true); // pass true to skip validation
<ide> }; |
|
Java | apache-2.0 | 06285143c04cb6bbdbadf4e886bfc263697a0467 | 0 | venusdrogon/feilong-spring | /*
* Copyright (C) 2008 feilong
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.feilong.spring.web.util;
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.Validate;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.context.support.WebApplicationContextUtils;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.FrameworkServlet;
import org.springframework.web.servlet.support.RequestContextUtils;
/**
* {@link WebApplicationContextUtils} 工具类.
*
* <p>
* 当 Web应用集成 Spring容器后,代表 Spring 容器的 {@link WebApplicationContext} 对象将以{@link WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE}
* 为键存放在 {@link ServletContext} 属性列表中,具体参见 {@link org.springframework.web.context.ContextLoader#initWebApplicationContext(ServletContext)}
* </p>
*
* <h3>{@link WebApplicationContextUtils#getWebApplicationContext(ServletContext)}VS
* {@link RequestContextUtils#getWebApplicationContext(javax.servlet.ServletRequest)}:</h3>
* <blockquote>
*
* <p style="color:red">
* 注意: {@link WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE} 存放的是spring ApplicationContext而非 springmvc ApplicationContext
* </p>
* <p>
* {@link RequestContextUtils#getWebApplicationContext(javax.servlet.ServletRequest)}可以取到 springmvc ApplicationContext,他的原理是,每次
*
* {@link DispatcherServlet#doService(HttpServletRequest, HttpServletResponse)} 都会往request里面设置 key为
* {@link DispatcherServlet#WEB_APPLICATION_CONTEXT_ATTRIBUTE} 的属性,而此时的值 WebApplicationContext是通过
* {@link FrameworkServlet#initWebApplicationContext()} 初始化的; 具体参见 {@link FrameworkServlet#createWebApplicationContext(ApplicationContext)}
* ,可以明显看出spring ApplicationContext是 springmvc ApplicationContext 的parent
* </p>
* </blockquote>
*
*
* <h3>{@link WebApplicationContextUtils#getWebApplicationContext(ServletContext) getWebApplicationContext}VS
* {@link WebApplicationContextUtils#getRequiredWebApplicationContext(ServletContext) getRequiredWebApplicationContext}:</h3>
* <blockquote>
* <p>
* 当 ServletContext 属性列表中不存在 WebApplicationContext时:
* <ol>
* <li>{@link WebApplicationContextUtils#getWebApplicationContext(ServletContext)}方法不会抛出异常,它简单地返回 null, 如果后续代码直接访问返回的结果将引发一个
* NullPointerException 异常.</li>
* <li>而{@link WebApplicationContextUtils#getRequiredWebApplicationContext(ServletContext)}方法要求 ServletContext属性列表中一定要包含一个有效的
* WebApplicationContext对象,否则马上抛出一个 异常 {@link java.lang.IllegalStateException}.</li>
* </ol>
* 我们推荐使用后者,因为它能提前发现错误的时间,强制开发者搭建好必备的基础设施.
* </p>
* </blockquote>
*
* @author <a href="http://feitianbenyue.iteye.com/">feilong</a>
* @see org.springframework.web.context.WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE
* @see org.springframework.web.context.support.WebApplicationContextUtils#getWebApplicationContext(ServletContext)
* @since 1.0.4
*/
public final class WebSpringUtil{
/** Don't let anyone instantiate this class. */
private WebSpringUtil(){
//AssertionError不是必须的. 但它可以避免不小心在类的内部调用构造器. 保证该类在任何情况下都不会被实例化.
//see 《Effective Java》 2nd
throw new AssertionError("No " + getClass().getName() + " instances for you!");
}
/**
* 获得 request.
*
* <h3>说明:</h3>
* <blockquote>
* <p>
* spring 3中可以通过 {@link RequestContextHolder} 得到 {@link HttpServletRequest},但是得不到 {@link HttpServletResponse},具体参见
* {@link <a href="https://github.com/venusdrogon/feilong-spring/issues/6">WebSpringUtil.getResponse()方法获取到的response是null</a>}
* </p>
* </blockquote>
*
* @return the request
* @see <a href="http://www.cnblogs.com/softidea/p/6125087.html">Spring MVC的RequestContextHolder使用误区</a>
* @see <a href="http://www.cnblogs.com/mikevictor07/p/3436393.html">springMVC 中几种获取request和response的方式</a>
* @see <a href="http://www.programering.com/q/MDO3QjMwATY.html">How to obtain the HttpServletResponse Spring AOP?</a>
* @see RequestContextHolder#getRequestAttributes()
* @see ServletRequestAttributes#getRequest()
* @see org.springframework.web.servlet.mvc.method.annotation.ServletRequestMethodArgumentResolver
* @see org.springframework.web.util.WebUtils#getNativeRequest(ServletRequest, Class)
* @since 1.10.0
*/
public static HttpServletRequest getRequest(){
ServletRequestAttributes servletRequestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
return servletRequestAttributes.getRequest();
}
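    // Usage sketch (illustrative, not part of the original class): code running on
    // the request-handling thread can fetch the current request without having it
    // passed down the call chain, e.g.
    //
    //     HttpServletRequest request = WebSpringUtil.getRequest();
    //     String userAgent = request.getHeader("User-Agent");
    //
    // RequestContextHolder is backed by a ThreadLocal, so this only works on the
    // thread that is currently processing the request.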
//********************************************************************************
/**
* 普通类获得spring 注入的类方法.
*
* <p>
* 此方法底层调用的是 {@link RequestContextUtils#getWebApplicationContext(ServletRequest, ServletContext)} ,会从spingmvc 以及spring
* ApplicationContext 查找bean
* </p>
*
* @param <T>
* the generic type
* @param request
* request
* @param beanName
* xml文件中配置的bean beanName
* @return 先在servlet-specific {@link WebApplicationContext} 里面找 bean;<br>
* 如果没有,会在 global context里面找;<br>
* 如果在servlet-specific 或者 global context 都找不到,会抛出 {@link IllegalStateException}
* @see #getWebApplicationContext(HttpServletRequest)
* @see RequestContextUtils#getWebApplicationContext(ServletRequest, ServletContext)
*/
@SuppressWarnings("unchecked")
public static <T> T getBean(HttpServletRequest request,String beanName){
WebApplicationContext webApplicationContext = getWebApplicationContext(request);
return (T) getBean(webApplicationContext, beanName);
}
/**
* Gets the bean.
*
* <p>
* 此方法底层调用的是 {@link RequestContextUtils#getWebApplicationContext(ServletRequest,
* ServletContext)} ,会从spingmvc 以及spring ApplicationContext 查找bean
* </p>
*
* @param <T>
* the generic type
* @param request
* the request
* @param requiredType
* the required type
* @return 先在servlet-specific {@link WebApplicationContext} 里面找 bean;<br>
* 如果没有,会在 global context里面找;<br>
* 如果在servlet-specific 或者 global context 都找不到,会抛出 {@link IllegalStateException}
* @see #getWebApplicationContext(HttpServletRequest)
* @see RequestContextUtils#getWebApplicationContext(ServletRequest, ServletContext)
*/
public static <T> T getBean(HttpServletRequest request,Class<T> requiredType){
WebApplicationContext webApplicationContext = getWebApplicationContext(request);
return getBean(webApplicationContext, requiredType);
}
//********************************************************************************************
/**
* 普通类获得spring 注入的类方法<br>
* 注意:<b>(如果找不到bean,返回null)</b>.
*
* @param <T>
* the generic type
* @param servletContext
* servletContext
* @param beanName
* xml文件中配置的bean beanName
* @return 注入的bean
* @see #getWebApplicationContext(ServletContext)
*/
@SuppressWarnings("unchecked")
public static <T> T getBean(ServletContext servletContext,String beanName){
WebApplicationContext webApplicationContext = getWebApplicationContext(servletContext);
return (T) getBean(webApplicationContext, beanName);
}
/**
* Gets the bean.
*
* @param <T>
* the generic type
* @param servletContext
* the servlet context
* @param requiredType
* the required type
* @return the bean
*/
public static <T> T getBean(ServletContext servletContext,Class<T> requiredType){
WebApplicationContext webApplicationContext = getWebApplicationContext(servletContext);
return getBean(webApplicationContext, requiredType);
}
//********************************************************************************************
/**
* Gets the required bean.
*
* @param <T>
* the generic type
* @param servletContext
* the servlet context
* @param beanName
* the bean name
* @return the required bean
* @see #getRequiredWebApplicationContext(ServletContext)
*/
public static <T> T getRequiredBean(ServletContext servletContext,String beanName){
WebApplicationContext webApplicationContext = getRequiredWebApplicationContext(servletContext);
return getBean(webApplicationContext, beanName);
}
/**
* Gets the required bean.
*
* @param <T>
* the generic type
* @param servletContext
* the servlet context
* @param requiredType
* the required type
* @return the required bean
* @see #getRequiredWebApplicationContext(ServletContext)
*/
public static <T> T getRequiredBean(ServletContext servletContext,Class<T> requiredType){
WebApplicationContext webApplicationContext = getRequiredWebApplicationContext(servletContext);
return getBean(webApplicationContext, requiredType);
}
//*******************************************************************************************
/**
* Gets the bean.
*
* @param <T>
* the generic type
* @param applicationContext
* the application context
* @param beanName
* the bean name
* @return NoSuchBeanDefinitionException - if there is no bean definition with the specified name
*/
@SuppressWarnings("unchecked")
private static <T> T getBean(ApplicationContext applicationContext,String beanName){
return (T) applicationContext.getBean(beanName);
}
/**
* Gets the bean.
*
* @param <T>
* the generic type
* @param applicationContext
* the application context
* @param requiredType
* the required type
* @return the bean
*/
private static <T> T getBean(ApplicationContext applicationContext,Class<T> requiredType){
return applicationContext.getBean(requiredType);
}
//*******************************************************************************************
/**
* Find the root {@link WebApplicationContext} for this web app, typically loaded via
* {@link org.springframework.web.context.ContextLoaderListener}.
* <p>
* Will rethrow an exception that happened on root context startup,
* to differentiate between a failed context startup and no context at all.
*
* @param servletContext
* the servlet context
* @return the root WebApplicationContext for this web app, or {@code null} if none
* @see org.springframework.web.context.WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE
* @see org.springframework.web.context.support.WebApplicationContextUtils#getWebApplicationContext(ServletContext)
* @since 1.1.1
*/
public static WebApplicationContext getWebApplicationContext(ServletContext servletContext){
return WebApplicationContextUtils.getWebApplicationContext(servletContext);
}
/**
* 获得 web application context.
*
* @param servletContext
* the servlet context
* @return the web application context
* @see org.springframework.web.context.WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE
* @see org.springframework.web.context.support.WebApplicationContextUtils#getRequiredWebApplicationContext(ServletContext)
* @since 1.2.0
*/
public static WebApplicationContext getRequiredWebApplicationContext(ServletContext servletContext){
return WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
}
/**
* 获得 web application context.
*
* <p>
* 此方法可以得到springmvc 的bean
* </p>
*
* @param request
* the request
* @return 如果有 servlet-specific WebApplicationContext那么返回;<br>
* 否则找 global context; <br>
* 两个都没有 会抛出 IllegalStateException
* @see org.springframework.web.servlet.support.RequestContextUtils#getWebApplicationContext(ServletRequest)
* @since 1.5.3
*/
public static WebApplicationContext getWebApplicationContext(HttpServletRequest request){
Validate.notNull(request, "request can't be null!");
//Gets the servlet context to which this ServletRequest was last dispatched.
//since Servlet 3.0
ServletContext servletContext = request.getServletContext();
Validate.notNull(servletContext, "servletContext can't be null!,request class is:[%s]", request.getClass().getName());
//内部调用了 WebApplicationContextUtils.getRequiredWebApplicationContext(ServletContext)
return RequestContextUtils.getWebApplicationContext(request, servletContext);
}
}
| feilong-spring-web/src/main/java/com/feilong/spring/web/util/WebSpringUtil.java | /*
* Copyright (C) 2008 feilong
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.feilong.spring.web.util;
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.context.support.WebApplicationContextUtils;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.FrameworkServlet;
import org.springframework.web.servlet.support.RequestContextUtils;
/**
* {@link WebApplicationContextUtils} 工具类.
*
* <p>
* 当 Web应用集成 Spring容器后,代表 Spring 容器的 {@link WebApplicationContext} 对象将以{@link WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE}
* 为键存放在 {@link ServletContext} 属性列表中,具体参见 {@link org.springframework.web.context.ContextLoader#initWebApplicationContext(ServletContext)}
* </p>
*
* <h3>{@link WebApplicationContextUtils#getWebApplicationContext(ServletContext)}VS
* {@link RequestContextUtils#getWebApplicationContext(javax.servlet.ServletRequest)}:</h3>
* <blockquote>
*
* <p style="color:red">
* 注意: {@link WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE} 存放的是spring ApplicationContext而非 springmvc ApplicationContext
* </p>
* <p>
* {@link RequestContextUtils#getWebApplicationContext(javax.servlet.ServletRequest)}可以取到 springmvc ApplicationContext,他的原理是,每次
*
* {@link DispatcherServlet#doService(HttpServletRequest, HttpServletResponse)} 都会往request里面设置 key为
* {@link DispatcherServlet#WEB_APPLICATION_CONTEXT_ATTRIBUTE} 的属性,而此时的值 WebApplicationContext是通过
* {@link FrameworkServlet#initWebApplicationContext()} 初始化的; 具体参见 {@link FrameworkServlet#createWebApplicationContext(ApplicationContext)}
* ,可以明显看出spring ApplicationContext是 springmvc ApplicationContext 的parent
* </p>
* </blockquote>
*
*
* <h3>{@link WebApplicationContextUtils#getWebApplicationContext(ServletContext) getWebApplicationContext}VS
* {@link WebApplicationContextUtils#getRequiredWebApplicationContext(ServletContext) getRequiredWebApplicationContext}:</h3>
* <blockquote>
* <p>
* 当 ServletContext 属性列表中不存在 WebApplicationContext时:
* <ol>
* <li>{@link WebApplicationContextUtils#getWebApplicationContext(ServletContext)}方法不会抛出异常,它简单地返回 null, 如果后续代码直接访问返回的结果将引发一个
* NullPointerException 异常.</li>
* <li>而{@link WebApplicationContextUtils#getRequiredWebApplicationContext(ServletContext)}方法要求 ServletContext属性列表中一定要包含一个有效的
* WebApplicationContext对象,否则马上抛出一个 异常 {@link java.lang.IllegalStateException}.</li>
* </ol>
* 我们推荐使用后者,因为它能提前发现错误的时间,强制开发者搭建好必备的基础设施.
* </p>
* </blockquote>
*
* @author <a href="http://feitianbenyue.iteye.com/">feilong</a>
* @see org.springframework.web.context.WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE
* @see org.springframework.web.context.support.WebApplicationContextUtils#getWebApplicationContext(ServletContext)
* @since 1.0.4
*/
public final class WebSpringUtil{
/** Don't let anyone instantiate this class. */
private WebSpringUtil(){
//AssertionError不是必须的. 但它可以避免不小心在类的内部调用构造器. 保证该类在任何情况下都不会被实例化.
//see 《Effective Java》 2nd
throw new AssertionError("No " + getClass().getName() + " instances for you!");
}
/**
* 获得 request.
*
* <h3>说明:</h3>
* <blockquote>
* <p>
* spring 3中可以通过 {@link RequestContextHolder} 得到 {@link HttpServletRequest},但是得不到 {@link HttpServletResponse},具体参见
* {@link <a href="https://github.com/venusdrogon/feilong-spring/issues/6">WebSpringUtil.getResponse()方法获取到的response是null</a>}
* </p>
* </blockquote>
*
* @return the request
* @see <a href="http://www.cnblogs.com/softidea/p/6125087.html">Spring MVC的RequestContextHolder使用误区</a>
* @see <a href="http://www.cnblogs.com/mikevictor07/p/3436393.html">springMVC 中几种获取request和response的方式</a>
* @see <a href="http://www.programering.com/q/MDO3QjMwATY.html">How to obtain the HttpServletResponse Spring AOP?</a>
* @see RequestContextHolder#getRequestAttributes()
* @see ServletRequestAttributes#getRequest()
* @see org.springframework.web.servlet.mvc.method.annotation.ServletRequestMethodArgumentResolver
* @see org.springframework.web.util.WebUtils#getNativeRequest(ServletRequest, Class)
* @since 1.10.0
*/
public static HttpServletRequest getRequest(){
ServletRequestAttributes servletRequestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
return servletRequestAttributes.getRequest();
}
//********************************************************************************
/**
* 普通类获得spring 注入的类方法.
*
* <p>
* 此方法底层调用的是 {@link RequestContextUtils#getWebApplicationContext(ServletRequest, ServletContext)} ,会从spingmvc 以及spring
* ApplicationContext 查找bean
* </p>
*
* @param <T>
* the generic type
* @param request
* request
* @param beanName
* xml文件中配置的bean beanName
* @return 先在servlet-specific {@link WebApplicationContext} 里面找 bean;<br>
* 如果没有,会在 global context里面找;<br>
* 如果在servlet-specific 或者 global context 都找不到,会抛出 {@link IllegalStateException}
* @see #getWebApplicationContext(HttpServletRequest)
* @see RequestContextUtils#getWebApplicationContext(ServletRequest, ServletContext)
*/
@SuppressWarnings("unchecked")
public static <T> T getBean(HttpServletRequest request,String beanName){
WebApplicationContext webApplicationContext = getWebApplicationContext(request);
return (T) getBean(webApplicationContext, beanName);
}
/**
* Gets the bean.
*
* <p>
* 此方法底层调用的是 {@link RequestContextUtils#getWebApplicationContext(ServletRequest,
* ServletContext)} ,会从spingmvc 以及spring ApplicationContext 查找bean
* </p>
*
* @param <T>
* the generic type
* @param request
* the request
* @param requiredType
* the required type
* @return 先在servlet-specific {@link WebApplicationContext} 里面找 bean;<br>
* 如果没有,会在 global context里面找;<br>
* 如果在servlet-specific 或者 global context 都找不到,会抛出 {@link IllegalStateException}
* @see #getWebApplicationContext(HttpServletRequest)
* @see RequestContextUtils#getWebApplicationContext(ServletRequest, ServletContext)
*/
public static <T> T getBean(HttpServletRequest request,Class<T> requiredType){
WebApplicationContext webApplicationContext = getWebApplicationContext(request);
return getBean(webApplicationContext, requiredType);
}
//********************************************************************************************
/**
* 普通类获得spring 注入的类方法<br>
* 注意:<b>(如果找不到bean,返回null)</b>.
*
* @param <T>
* the generic type
* @param servletContext
* servletContext
* @param beanName
* xml文件中配置的bean beanName
* @return 注入的bean
* @see #getWebApplicationContext(ServletContext)
*/
@SuppressWarnings("unchecked")
public static <T> T getBean(ServletContext servletContext,String beanName){
WebApplicationContext webApplicationContext = getWebApplicationContext(servletContext);
return (T) getBean(webApplicationContext, beanName);
}
/**
* Gets the bean.
*
* @param <T>
* the generic type
* @param servletContext
* the servlet context
* @param requiredType
* the required type
* @return the bean
*/
public static <T> T getBean(ServletContext servletContext,Class<T> requiredType){
WebApplicationContext webApplicationContext = getWebApplicationContext(servletContext);
return getBean(webApplicationContext, requiredType);
}
//********************************************************************************************
/**
* Gets the required bean.
*
* @param <T>
* the generic type
* @param servletContext
* the servlet context
* @param beanName
* the bean name
* @return the required bean
* @see #getRequiredWebApplicationContext(ServletContext)
*/
public static <T> T getRequiredBean(ServletContext servletContext,String beanName){
WebApplicationContext webApplicationContext = getRequiredWebApplicationContext(servletContext);
return getBean(webApplicationContext, beanName);
}
/**
* Gets the required bean.
*
* @param <T>
* the generic type
* @param servletContext
* the servlet context
* @param requiredType
* the required type
* @return the required bean
* @see #getRequiredWebApplicationContext(ServletContext)
*/
public static <T> T getRequiredBean(ServletContext servletContext,Class<T> requiredType){
WebApplicationContext webApplicationContext = getRequiredWebApplicationContext(servletContext);
return getBean(webApplicationContext, requiredType);
}
//*******************************************************************************************
/**
* Gets the bean.
*
* @param <T>
* the generic type
* @param applicationContext
* the application context
* @param beanName
* the bean name
* @return NoSuchBeanDefinitionException - if there is no bean definition with the specified name
*/
@SuppressWarnings("unchecked")
private static <T> T getBean(ApplicationContext applicationContext,String beanName){
return (T) applicationContext.getBean(beanName);
}
/**
* Gets the bean.
*
* @param <T>
* the generic type
* @param applicationContext
* the application context
* @param requiredType
* the required type
* @return the bean
*/
private static <T> T getBean(ApplicationContext applicationContext,Class<T> requiredType){
return applicationContext.getBean(requiredType);
}
//*******************************************************************************************
/**
* Find the root {@link WebApplicationContext} for this web app, typically loaded via
* {@link org.springframework.web.context.ContextLoaderListener}.
* <p>
* Will rethrow an exception that happened on root context startup,
* to differentiate between a failed context startup and no context at all.
*
* @param servletContext
* the servlet context
* @return the root WebApplicationContext for this web app, or {@code null} if none
* @see org.springframework.web.context.WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE
* @see org.springframework.web.context.support.WebApplicationContextUtils#getWebApplicationContext(ServletContext)
* @since 1.1.1
*/
public static WebApplicationContext getWebApplicationContext(ServletContext servletContext){
return WebApplicationContextUtils.getWebApplicationContext(servletContext);
}
/**
* 获得 web application context.
*
* @param servletContext
* the servlet context
* @return the web application context
* @see org.springframework.web.context.WebApplicationContext#ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE
* @see org.springframework.web.context.support.WebApplicationContextUtils#getRequiredWebApplicationContext(ServletContext)
* @since 1.2.0
*/
public static WebApplicationContext getRequiredWebApplicationContext(ServletContext servletContext){
return WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
}
/**
* 获得 web application context.
*
* <p>
* 此方法可以得到springmvc 的bean
* </p>
*
* @param request
* the request
* @return 如果有 servlet-specific WebApplicationContext那么返回;<br>
* 否则找 global context; <br>
* 两个都没有 会抛出 IllegalStateException
* @see org.springframework.web.servlet.support.RequestContextUtils#getWebApplicationContext(ServletRequest)
* @since 1.5.3
*/
public static WebApplicationContext getWebApplicationContext(HttpServletRequest request){
//内部调用了 WebApplicationContextUtils.getRequiredWebApplicationContext(ServletContext)
return RequestContextUtils.getWebApplicationContext(request, request.getServletContext());
}
}
 | Improve robustness of WebSpringUtil.getWebApplicationContext(HttpServletRequest), fix
#28 | feilong-spring-web/src/main/java/com/feilong/spring/web/util/WebSpringUtil.java | Improve robustness of WebSpringUtil.getWebApplicationContext(HttpServletRequest), fix #28 | <ide><path>eilong-spring-web/src/main/java/com/feilong/spring/web/util/WebSpringUtil.java
<ide> import javax.servlet.http.HttpServletRequest;
<ide> import javax.servlet.http.HttpServletResponse;
<ide>
<add>import org.apache.commons.lang3.Validate;
<ide> import org.springframework.context.ApplicationContext;
<ide> import org.springframework.web.context.WebApplicationContext;
<ide> import org.springframework.web.context.request.RequestContextHolder;
<ide> * @since 1.5.3
<ide> */
<ide> public static WebApplicationContext getWebApplicationContext(HttpServletRequest request){
<add> Validate.notNull(request, "request can't be null!");
<add>
<add> //Gets the servlet context to which this ServletRequest was last dispatched.
<add> //since Servlet 3.0
<add> ServletContext servletContext = request.getServletContext();
<add> Validate.notNull(servletContext, "servletContext can't be null!,request class is:[%s]", request.getClass().getName());
<add>
<ide> //内部调用了 WebApplicationContextUtils.getRequiredWebApplicationContext(ServletContext)
<del> return RequestContextUtils.getWebApplicationContext(request, request.getServletContext());
<add> return RequestContextUtils.getWebApplicationContext(request, servletContext);
<ide> }
<ide> } |
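To make the lookups discussed in the class Javadoc concrete, here is a minimal sketch that mirrors the calls used in WebSpringUtil (names are illustrative; it assumes a Servlet 3.0+ container and the same Spring MVC setup as above): the root context registered by ContextLoaderListener is read from the ServletContext, while the servlet-specific Spring MVC context is resolved through RequestContextUtils and normally has the root context as its parent.

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;

import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
import org.springframework.web.servlet.support.RequestContextUtils;

class ContextLookupSketch {

    // Root (plain Spring) context: stored as a ServletContext attribute by
    // ContextLoaderListener; the "required" variant fails fast with an
    // IllegalStateException when it is missing.
    static WebApplicationContext rootContext(ServletContext servletContext) {
        return WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
    }

    // Servlet-specific (Spring MVC) context: bound to the request by
    // DispatcherServlet; falls back to the global context when absent.
    static WebApplicationContext mvcContext(HttpServletRequest request) {
        ServletContext servletContext = request.getServletContext(); // Servlet 3.0+
        return RequestContextUtils.getWebApplicationContext(request, servletContext);
    }
}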
|
JavaScript | agpl-3.0 | a611be7c972a9392701d0efb2fd5488a33c60f07 | 0 | FoxelSA/freepano,luxigo/freepano,FoxelSA/freepano,FoxelSA/freepano,luxigo/freepano,FoxelSA/freepano | /*
* freepano - WebGL panorama viewer
*
* Copyright (c) 2014 FOXEL SA - http://foxel.ch
* Please read <http://foxel.ch/license> for more information.
*
*
* Author(s):
*
* Alexandre Kraft <[email protected]>
*
*
* Contributor(s):
*
* Nils Hamel <[email protected]>
*
*
* This file is part of the FOXEL project <http://foxel.ch>.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* Additional Terms:
*
* You are required to preserve legal notices and author attributions in
* that material or in the Appropriate Legal Notices displayed by works
* containing it.
*
* You are required to attribute the work as explained in the "Usage and
* Attribution" section of <http://foxel.ch/license>.
*/
/**
* Controls constructor
*/
function Controls(options) {
if (!(this instanceof Controls))
return new Controls(options);
$.extend(true,this,this.defaults,options);
this.init();
}
/**
* Extends Controls prototype
*/
$.extend(true,Controls.prototype, {
// default values
defaults: {
// keyboard
keyboard: {
move: {
active: false,
step: 0.5
},
zoom: {
active: false,
step: null // value, or [null] meaning the same as panorama.camera.zoom.step
}
},
// device motion
devicemotion: {
move: {
active: false
},
internal: {
ticks: {
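                    // throttling: the motion handler updates the orientation on every
                    // event but skips drawScene() until more than [nth] events have accumulated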
nth: 5,
count: 0, // [auto]
time: 0 // [auto]
},
orientation: {
lon: 0, // [auto]
lat: 0 // [auto]
},
gravity: {
aligned: false, // [auto]
sign: 1 // [auto] -1/+1 following device orientation
}
}
}
},
// init() method
init: function controls_init() {
var controls = this;
// orientation
controls.orientation_detect();
$(window).on('resize', function(e) {
controls.orientation_detect();
});
// keyboard
controls._init_keyboard();
// devicemotion
controls._init_devicemotion();
// callback!
controls.callback();
},
// panorama_init() method
panorama_init: Panorama.prototype.init,
// orientation
orientation: {
portrait: false,
landscape: true,
},
// orientation_detect() method
orientation_detect: function() {
this.orientation.portrait = ($(window).width() < $(window).height());
this.orientation.landscape = !this.orientation.portrait;
},
// gravity_alignment() method
gravity_alignment: function() {
this.devicemotion.internal.orientation.lon = 0; //todo!
this.devicemotion.internal.orientation.lat = 0; //todo!
// gravity set? todo!
this.devicemotion.internal.gravity.aligned = true;
},
// [private] _init_keyboard() method
_init_keyboard: function() {
var controls = this;
// keyboard move
if (controls.keyboard.move.active)
controls._register_keyboard_move(controls);
// keyboard zoom
if (controls.keyboard.zoom.active)
controls._register_keyboard_zoom(controls);
// watch keyboard move properties
watch(controls.keyboard.move,['active'], function() {
if (controls.keyboard.move.active)
controls._register_keyboard_move(controls);
else
controls._unregister_keyboard_move(controls);
});
// watch keyboard zoom properties
watch(controls.keyboard.zoom,['active'], function() {
if (controls.keyboard.zoom.active)
controls._register_keyboard_zoom(controls);
else
controls._unregister_keyboard_zoom(controls);
});
},
// [private] _register_keyboard_move() method
_register_keyboard_move: function(controls) {
$(document).on('keydown',{controls: controls},controls._keyboard_move);
},
// [private] _unregister_keyboard_move() method
_unregister_keyboard_move: function(controls) {
$(document).off('keydown',controls._keyboard_move);
},
// [private] _register_keyboard_zoom() method
_register_keyboard_zoom: function(controls) {
$(document).on('keydown',{controls: controls},controls._keyboard_zoom);
},
// [private] _unregister_keyboard_zoom() method
_unregister_keyboard_zoom: function(controls) {
$(document).off('keydown',controls._keyboard_zoom);
},
// [private] _keyboard_move() method
_keyboard_move: function(e) {
var controls = e.data.controls;
if (!controls.keyboard.move.active)
return;
var needDrawScene = true;
var moveStep = controls.keyboard.move.step;
// move
switch(e.keyCode) {
case 37: // arrow left
controls.panorama.lon -= moveStep;
break;
case 38: // arrow top
controls.panorama.lat -= moveStep;
break;
case 39: // arrow right
controls.panorama.lon += moveStep;
break;
case 40: // arrow bottom
controls.panorama.lat += moveStep;
break;
default:
needDrawScene = false;
}
// update
if (needDrawScene)
controls.panorama.drawScene();
},
// [private] _keyboard_zoom() method
_keyboard_zoom: function(e) {
var controls = e.data.controls;
if (!controls.keyboard.zoom.active)
return;
var needZoomUpdate = true;
var zoomStep = controls.keyboard.zoom.step == null ?
controls.panorama.camera.zoom.step : controls.keyboard.zoom.step;
// zoom
switch(e.keyCode) {
case 107: // [-] key
controls.panorama.camera.zoom.current -= zoomStep;
break;
case 109: // [+] key
controls.panorama.camera.zoom.current += zoomStep;
break;
default:
needZoomUpdate = false;
}
// update
if (needZoomUpdate)
controls.panorama.zoomUpdate();
},
// [private] _init_devicemotion() method
_init_devicemotion: function() {
var controls = this;
// devicemotion move
if (controls.devicemotion.move.active)
controls._register_devicemotion_move(controls);
// watch devicemotion move properties
watch(controls.devicemotion.move,['active'], function() {
if (controls.devicemotion.move.active)
controls._register_devicemotion_move(controls);
else
controls._unregister_devicemotion_move(controls);
});
},
// [private] _register_devicemotion_move() method
_register_devicemotion_move: function(controls) {
// pass controls
window._controls_devicemotion = controls;
// html5 device motion
if (window.DeviceMotionEvent)
window.addEventListener('devicemotion',controls._device_move_by_device_motion,false);
},
// [private] _unregister_devicemotion_move() method
_unregister_devicemotion_move: function(controls) {
// motion
if (window.DeviceMotionEvent)
window.removeEventListener('devicemotion',controls._device_move_by_device_motion,false);
// reset time
controls.devicemotion.internal.ticks.time = 0;
// clear controls
window._controls_devicemotion = null;
},
// [private] _device_move_by_device_motion() method
_device_move_by_device_motion: function(e) {
var controls = window._controls_devicemotion;
if (!controls.devicemotion.move.active)
return;
// first tick
if (controls.devicemotion.internal.ticks.time == 0) {
// init time
controls.devicemotion.internal.ticks.time = (new Date()).getTime();
// gravity alignment
controls.gravity_alignment();
return;
}
// check for gravity alignment
if (!controls.devicemotion.internal.gravity.aligned)
return;
// time
var now = (new Date()).getTime();
var elapsed = (now - controls.devicemotion.internal.ticks.time) / 1000;
// original orientation
var lon = controls.devicemotion.internal.orientation.lon;
var lat = controls.devicemotion.internal.orientation.lat;
// panorama orientation per device orientation
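        // e.rotationRate gives angular velocities in degrees per second, so multiplying
        // by the elapsed time (seconds) approximates the rotation since the previous
        // event; alpha and beta swap roles between portrait and landscape because the
        // screen axes rotate relative to the device.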
if (controls.orientation.portrait) {
lon -= controls.devicemotion.internal.gravity.sign * e.rotationRate.beta * elapsed;
lat -= controls.devicemotion.internal.gravity.sign * e.rotationRate.alpha * elapsed;
} else {
lon += controls.devicemotion.internal.gravity.sign * e.rotationRate.alpha * elapsed;
lat -= controls.devicemotion.internal.gravity.sign * e.rotationRate.beta * elapsed;
}
// assign orientation
controls.panorama.lon = lon;
controls.panorama.lat = lat;
controls.devicemotion.internal.orientation.lon = lon;
controls.devicemotion.internal.orientation.lat = lat;
// store time
controls.devicemotion.internal.ticks.time = now;
// limit ticks rate
controls.devicemotion.internal.ticks.count++;
if (controls.devicemotion.internal.ticks.count <= controls.devicemotion.internal.ticks.nth)
return;
else
controls.devicemotion.internal.ticks.count = 0;
// moved? todo!
var needDrawScene = true;
// draw scene
if (needDrawScene)
controls.panorama.drawScene();
}
});
/**
* Extends Panorama prototype
*/
$.extend(Panorama.prototype, {
// init() method
init: function panorama_init() {
var panorama = this;
// controls defined in freepano options
if (typeof panorama.controls !== 'undefined') {
if (!(panorama.controls instanceof Controls)) {
                // convert options to instantiated class
panorama.controls = new Controls($.extend(true,{
panorama: panorama,
callback: function() {
Controls.prototype.panorama_init.call(panorama);
}
},panorama.controls));
}
} else {
Controls.prototype.panorama_init.call(panorama);
}
}
});
| js/jquery.freepano.controls.js | /*
* freepano - WebGL panorama viewer
*
* Copyright (c) 2014 FOXEL SA - http://foxel.ch
* Please read <http://foxel.ch/license> for more information.
*
*
* Author(s):
*
* Alexandre Kraft <[email protected]>
*
*
* This file is part of the FOXEL project <http://foxel.ch>.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* Additional Terms:
*
* You are required to preserve legal notices and author attributions in
* that material or in the Appropriate Legal Notices displayed by works
* containing it.
*
* You are required to attribute the work as explained in the "Usage and
* Attribution" section of <http://foxel.ch/license>.
*/
/**
* Controls constructor
*/
function Controls(options) {
if (!(this instanceof Controls))
return new Controls(options);
$.extend(true,this,this.defaults,options);
this.init();
}
/**
* Extends Controls prototype
*/
$.extend(true,Controls.prototype, {
// default values
defaults: {
// keyboard
keyboard: {
move: {
active: false,
step: 0.5
},
zoom: {
active: false,
step: null // value, or [null] same as panorama.camera.zoom.step
}
},
// device motion
devicemotion: {
nth: 5, // limit event action to nth ticks
move: {
active: false,
noise: 5,
sensivity: 10
}
}
},
// init() method
init: function controls_init() {
var controls = this;
// orientation
controls.orientation_detect();
$(window).on('resize', function(e) {
controls.orientation_detect();
});
// keyboard
controls._init_keyboard();
// devicemotion
controls._init_devicemotion();
// callback!
controls.callback();
},
// panorama_init() method
panorama_init: Panorama.prototype.init,
// orientation
orientation: {
portrait: false,
landscape: true,
},
// orientation_detect() method
orientation_detect: function() {
this.orientation.portrait = ($(window).width() < $(window).height());
this.orientation.landscape = !this.orientation.portrait;
},
// [private] _init_keyboard() method
_init_keyboard: function() {
var controls = this;
// keyboard move
if (controls.keyboard.move.active)
controls._register_keyboard_move(controls);
// keyboard zoom
if (controls.keyboard.zoom.active)
controls._register_keyboard_zoom(controls);
// watch keyboard move properties
watch(controls.keyboard.move,['active'], function() {
if (controls.keyboard.move.active)
controls._register_keyboard_move(controls);
else
controls._unregister_keyboard_move(controls);
});
// watch keyboard zoom properties
watch(controls.keyboard.zoom,['active'], function() {
if (controls.keyboard.zoom.active)
controls._register_keyboard_zoom(controls);
else
controls._unregister_keyboard_zoom(controls);
});
},
// [private] _register_keyboard_move() method
_register_keyboard_move: function(controls) {
$(document).on('keydown',{controls: controls},controls._keyboard_move);
},
// [private] _unregister_keyboard_move() method
_unregister_keyboard_move: function(controls) {
$(document).off('keydown',controls._keyboard_move);
},
// [private] _register_keyboard_zoom() method
_register_keyboard_zoom: function(controls) {
$(document).on('keydown',{controls: controls},controls._keyboard_zoom);
},
// [private] _unregister_keyboard_zoom() method
_unregister_keyboard_zoom: function(controls) {
$(document).off('keydown',controls._keyboard_zoom);
},
// [private] _keyboard_move() method
_keyboard_move: function(e) {
var controls = e.data.controls;
if (!controls.keyboard.move.active)
return;
var needDrawScene = true;
var moveStep = controls.keyboard.move.step;
// move
switch(e.keyCode) {
case 37: // arrow left
controls.panorama.lon -= moveStep;
break;
case 38: // arrow top
controls.panorama.lat -= moveStep;
break;
case 39: // arrow right
controls.panorama.lon += moveStep;
break;
case 40: // arrow bottom
controls.panorama.lat += moveStep;
break;
default:
needDrawScene = false;
}
// update
if (needDrawScene)
controls.panorama.drawScene();
},
// [private] _keyboard_zoom() method
_keyboard_zoom: function(e) {
var controls = e.data.controls;
if (!controls.keyboard.zoom.active)
return;
var needZoomUpdate = true;
var zoomStep = controls.keyboard.zoom.step == null ?
controls.panorama.camera.zoom.step : controls.keyboard.zoom.step;
// zoom
switch(e.keyCode) {
case 107: // [-] key
controls.panorama.camera.zoom.current -= zoomStep;
break;
case 109: // [+] key
controls.panorama.camera.zoom.current += zoomStep;
break;
default:
needZoomUpdate = false;
}
// update
if (needZoomUpdate)
controls.panorama.zoomUpdate();
},
// device
device: {
ticks: 0
},
// [private] _init_devicemotion() method
_init_devicemotion: function() {
var controls = this;
// devicemotion move
if (controls.devicemotion.move.active)
controls._register_devicemotion_move(controls);
// watch devicemotion move properties
watch(controls.devicemotion.move,['active'], function() {
if (controls.devicemotion.move.active)
controls._register_devicemotion_move(controls);
else
controls._unregister_devicemotion_move(controls);
});
},
// [private] _register_devicemotion_move() method
_register_devicemotion_move: function(controls) {
// pass controls
window._controls_devicemotion = controls;
// orientation
if (window.DeviceOrientationEvent)
window.addEventListener('deviceorientation',controls._device_move_by_orientation,false);
// motion
if (window.DeviceMotionEvent)
window.addEventListener('devicemotion',controls._device_move_by_device_motion,false);
},
// [private] _unregister_devicemotion_move() method
_unregister_devicemotion_move: function(controls) {
// orientation
if (window.DeviceOrientationEvent)
window.removeEventListener('deviceorientation',controls._device_move_by_orientation,false);
// motion
if (window.DeviceMotionEvent)
window.removeEventListener('devicemotion',controls._device_move_by_device_motion,false);
// clear controls
window._controls_devicemotion = null;
},
// [private] _device_move_by_orientation() method
_device_move_by_orientation: function(e) {
var controls = window._controls_devicemotion;
if (!controls.devicemotion.move.active || !controls.orientation.portrait)
return;
// limit ticks rate
controls.device.ticks++;
if (controls.device.ticks <= controls.devicemotion.nth)
return;
else
controls.device.ticks = 0;
// target
var lon = 360-e.alpha;
var lat = -(90-e.beta);
// update
var needDrawScene = !(controls.panorama.lon == lon && controls.panorama.lat == lat);
// move
controls.panorama.lon = lon;
controls.panorama.lat = lat;
if (needDrawScene)
controls.panorama.drawScene();
},
// [private] _device_move_by_device_motion() method
_device_move_by_device_motion: function(e) {
var controls = window._controls_devicemotion;
if (!controls.devicemotion.move.active || !controls.orientation.landscape)
return;
// limit ticks rate
controls.device.ticks++;
if (controls.device.ticks <= controls.devicemotion.nth)
return;
else
controls.device.ticks = 0;
// rotation
var rotation = e.rotationRate;
var x, y, z = 0;
x = rotation.alpha;
y = rotation.beta;
// noise
var noise = controls.devicemotion.move.noise;
if (x > -noise && x < noise)
x = 0;
if (y > -noise && y < noise)
y = 0;
// update
var needDrawScene = (x != 0 || y != 0);
// move
controls.panorama.lon -= x / controls.devicemotion.move.sensivity;
controls.panorama.lat += y / controls.devicemotion.move.sensivity;
if (needDrawScene)
controls.panorama.drawScene();
}
});
/**
* Extends Panorama prototype
*/
$.extend(Panorama.prototype, {
// init() method
init: function panorama_init() {
var panorama = this;
// controls defined in freepano options
if (typeof panorama.controls !== 'undefined') {
if (!(panorama.controls instanceof Controls)) {
// convert options to an instantiated class
panorama.controls = new Controls($.extend(true,{
panorama: panorama,
callback: function() {
Controls.prototype.panorama_init.call(panorama);
}
},panorama.controls));
}
} else {
Controls.prototype.panorama_init.call(panorama);
}
}
});
| Better handling of device events. Use HTML5 DeviceMotion API instead of DeviceOrientation.
| js/jquery.freepano.controls.js | Better handling of device events. Use HTML5 DeviceMotion API instead of DeviceOrientation. | <ide><path>s/jquery.freepano.controls.js
<ide> * Author(s):
<ide> *
<ide> * Alexandre Kraft <[email protected]>
<add> *
<add> *
<add> * Contributor(s):
<add> *
<add> * Nils Hamel <[email protected]>
<ide> *
<ide> *
<ide> * This file is part of the FOXEL project <http://foxel.ch>.
<ide> },
<ide> zoom: {
<ide> active: false,
<del> step: null // value, or [null] same as panorama.camera.zoom.step
<add> step: null // value, or [null] meaning the same as panorama.camera.zoom.step
<ide> }
<ide> },
<ide>
<ide> // device motion
<ide> devicemotion: {
<del> nth: 5, // limit event action to nth ticks
<ide> move: {
<del> active: false,
<del> noise: 5,
<del> sensivity: 10
<add> active: false
<add> },
<add> internal: {
<add> ticks: {
<add> nth: 5,
<add> count: 0, // [auto]
<add> time: 0 // [auto]
<add> },
<add> orientation: {
<add> lon: 0, // [auto]
<add> lat: 0 // [auto]
<add> },
<add> gravity: {
<add> aligned: false, // [auto]
<add> sign: 1 // [auto] -1/+1 following device orientation
<add> }
<ide> }
<ide> }
<ide>
<ide> orientation_detect: function() {
<ide> this.orientation.portrait = ($(window).width() < $(window).height());
<ide> this.orientation.landscape = !this.orientation.portrait;
<add> },
<add>
<add> // gravity_alignment() method
<add> gravity_alignment: function() {
<add>
<add> this.devicemotion.internal.orientation.lon = 0; //todo!
<add> this.devicemotion.internal.orientation.lat = 0; //todo!
<add>
<add> // gravity set? todo!
<add> this.devicemotion.internal.gravity.aligned = true;
<add>
<ide> },
<ide>
<ide> // [private] _init_keyboard() method
<ide>
<ide> },
<ide>
<del> // device
<del> device: {
<del> ticks: 0
<del> },
<del>
<ide> // [private] _init_devicemotion() method
<ide> _init_devicemotion: function() {
<ide>
<ide> // pass controls
<ide> window._controls_devicemotion = controls;
<ide>
<del> // orientation
<del> if (window.DeviceOrientationEvent)
<del> window.addEventListener('deviceorientation',controls._device_move_by_orientation,false);
<del>
<del> // motion
<add> // html5 device motion
<ide> if (window.DeviceMotionEvent)
<ide> window.addEventListener('devicemotion',controls._device_move_by_device_motion,false);
<ide>
<ide>
<ide> // [private] _unregister_devicemotion_move() method
<ide> _unregister_devicemotion_move: function(controls) {
<del>
<del> // orientation
<del> if (window.DeviceOrientationEvent)
<del> window.removeEventListener('deviceorientation',controls._device_move_by_orientation,false);
<ide>
<ide> // motion
<ide> if (window.DeviceMotionEvent)
<ide> window.removeEventListener('devicemotion',controls._device_move_by_device_motion,false);
<ide>
<add> // reset time
<add> controls.devicemotion.internal.ticks.time = 0;
<add>
<ide> // clear controls
<ide> window._controls_devicemotion = null;
<ide>
<ide> },
<ide>
<del> // [private] _device_move_by_orientation() method
<del> _device_move_by_orientation: function(e) {
<add> // [private] _device_move_by_device_motion() method
<add> _device_move_by_device_motion: function(e) {
<ide>
<ide> var controls = window._controls_devicemotion;
<del> if (!controls.devicemotion.move.active || !controls.orientation.portrait)
<del> return;
<del>
<del> // limit ticks rate
<del> controls.device.ticks++;
<del> if (controls.device.ticks <= controls.devicemotion.nth)
<del> return;
<del> else
<del> controls.device.ticks = 0;
<del>
<del> // target
<del> var lon = 360-e.alpha;
<del> var lat = -(90-e.beta);
<del>
<del> // update
<del> var needDrawScene = !(controls.panorama.lon == lon && controls.panorama.lat == lat);
<del>
<del> // move
<add> if (!controls.devicemotion.move.active)
<add> return;
<add>
<add> // first tick
<add> if (controls.devicemotion.internal.ticks.time == 0) {
<add>
<add> // init time
<add> controls.devicemotion.internal.ticks.time = (new Date()).getTime();
<add>
<add> // gravity alignment
<add> controls.gravity_alignment();
<add> return;
<add>
<add> }
<add>
<add> // check for gravity alignment
<add> if (!controls.devicemotion.internal.gravity.aligned)
<add> return;
<add>
<add> // time
<add> var now = (new Date()).getTime();
<add> var elapsed = (now - controls.devicemotion.internal.ticks.time) / 1000;
<add>
<add> // original orientation
<add> var lon = controls.devicemotion.internal.orientation.lon;
<add> var lat = controls.devicemotion.internal.orientation.lat;
<add>
<add> // panorama orientation per device orientation
<add> if (controls.orientation.portrait) {
<add> lon -= controls.devicemotion.internal.gravity.sign * e.rotationRate.beta * elapsed;
<add> lat -= controls.devicemotion.internal.gravity.sign * e.rotationRate.alpha * elapsed;
<add> } else {
<add> lon += controls.devicemotion.internal.gravity.sign * e.rotationRate.alpha * elapsed;
<add> lat -= controls.devicemotion.internal.gravity.sign * e.rotationRate.beta * elapsed;
<add> }
<add>
<add> // assign orientation
<ide> controls.panorama.lon = lon;
<ide> controls.panorama.lat = lat;
<del> if (needDrawScene)
<del> controls.panorama.drawScene();
<del>
<del> },
<del>
<del> // [private] _device_move_by_device_motion() method
<del> _device_move_by_device_motion: function(e) {
<del>
<del> var controls = window._controls_devicemotion;
<del> if (!controls.devicemotion.move.active || !controls.orientation.landscape)
<del> return;
<add> controls.devicemotion.internal.orientation.lon = lon;
<add> controls.devicemotion.internal.orientation.lat = lat;
<add>
<add> // store time
<add> controls.devicemotion.internal.ticks.time = now;
<ide>
<ide> // limit ticks rate
<del> controls.device.ticks++;
<del> if (controls.device.ticks <= controls.devicemotion.nth)
<add> controls.devicemotion.internal.ticks.count++;
<add> if (controls.devicemotion.internal.ticks.count <= controls.devicemotion.internal.ticks.nth)
<ide> return;
<ide> else
<del> controls.device.ticks = 0;
<del>
<del> // rotation
<del> var rotation = e.rotationRate;
<del> var x, y, z = 0;
<del> x = rotation.alpha;
<del> y = rotation.beta;
<del>
<del> // noise
<del> var noise = controls.devicemotion.move.noise;
<del> if (x > -noise && x < noise)
<del> x = 0;
<del> if (y > -noise && y < noise)
<del> y = 0;
<del>
<del> // update
<del> var needDrawScene = (x != 0 || y != 0);
<del>
<del> // move
<del> controls.panorama.lon -= x / controls.devicemotion.move.sensivity;
<del> controls.panorama.lat += y / controls.devicemotion.move.sensivity;
<add> controls.devicemotion.internal.ticks.count = 0;
<add>
<add> // moved? todo!
<add> var needDrawScene = true;
<add>
<add> // draw scene
<ide> if (needDrawScene)
<ide> controls.panorama.drawScene();
<ide> |
|
JavaScript | mit | 3345f23b5cb595b7eb7985be1e7e024f5a644989 | 0 | iliyan-trifonov/notepads-nodejs-angularjs-mongodb-bootstrap,iliyan-trifonov/notepads-nodejs-angularjs-mongodb-bootstrap | 'use strict';
var Category = require('../../../src/models/category'),
User = require('../../../src/models/user'),
assert = require('assert'),
connection = require('../../db_common');
describe('Category Model', function () {
var db, testUser, testCats = [];
before(function (done) {
//TODO: use callback
db = connection();
//add some testing data
User.create({
facebookId: +new Date(),
name: 'Iliyan Trifonov'
}, function (err, user) {
assert.ifError(err);
assert.ok(user !== null);
testUser = user;
Category.create({
name: 'Test cat1',
user: user._id
}, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
testCats.push(category);
Category.create({
name: 'Test cat2',
user: user._id
}, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
testCats.push(category);
done();
});
});
});
});
after(function (done) {
Category.remove({}, function () {
User.remove({}, function () {
db.close();
done();
});
});
});
it('should create and save a new Category', function (done) {
var cat = {
name: 'Testcat',
notepadsCount: 0,
user: null
};
Category.create(cat , function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.strictEqual(category.name, cat.name);
assert.strictEqual(category.notepadsCount, cat.notepadsCount);
assert.strictEqual(category.user, cat.user);
done();
});
});
describe('getByUserId', function () {
it('should return categories by uid', function (done) {
Category.getByUserId(testUser._id, function (err, cats) {
assert.ifError(err);
assert.ok(cats !== null);
assert.ok(cats instanceof Array);
assert.ok(cats.length === 2);
assert.strictEqual(cats[0].name, testCats[0].name);
assert.strictEqual(cats[1].name, testCats[1].name);
done();
});
});
});
describe('getByIdForUser', function () {
it('should return a category id and name by given cat Id and uid', function (done) {
Category.getByIdForUser(testCats[0]._id, testUser._id, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.ok(category._id.equals(testCats[0]._id));
assert.strictEqual(category.name, testCats[0].name);
assert.ok(!category.user);
assert.ok(!category.notepadsCount);
done();
});
});
});
describe('increaseNotepadsCountById', function () {
it('should increase notepadsCount of a cat with 1 given cat id', function (done) {
Category.increaseNotepadsCountById(testCats[0]._id, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.ok(category._id.equals(testCats[0]._id));
assert.strictEqual(category.notepadsCount, testCats[0].notepadsCount + 1);
testCats[0] = category;
done();
});
});
});
describe('decreaseNotepadsCountById', function () {
it('should decrease notepadsCount of a cat with 1 given cat id', function (done) {
Category.decreaseNotepadsCountById(testCats[0]._id, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.ok(category._id.equals(testCats[0]._id));
assert.strictEqual(category.notepadsCount, testCats[0].notepadsCount - 1);
testCats[0] = category;
done();
});
});
});
});
| test/server/models/category_test.js | 'use strict';
var Category = require('../../../src/models/category'),
User = require('../../../src/models/user'),
assert = require('assert'),
connection = require('../../db_common');
describe('Category Model', function () {
var db, testUser, testCats = [];
before(function (done) {
db = connection();
//add some testing data
User.create({
facebookId: +new Date(),
name: 'Iliyan Trifonov'
}, function (err, user) {
assert.ifError(err);
assert.ok(user !== null);
testUser = user;
Category.create({
name: 'Test cat1',
user: user._id
}, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
testCats.push(category);
Category.create({
name: 'Test cat2',
user: user._id
}, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
testCats.push(category);
done();
});
});
});
});
after(function (done) {
Category.remove({}, function () {
User.remove({}, function () {
done();
});
});
});
it('should create and save a new Category', function (done) {
var cat = {
name: 'Testcat',
notepadsCount: 0,
user: null
};
Category.create(cat , function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.strictEqual(category.name, cat.name);
assert.strictEqual(category.notepadsCount, cat.notepadsCount);
assert.strictEqual(category.user, cat.user);
done();
});
});
describe('getByUserId', function () {
it('should return categories by uid', function (done) {
Category.getByUserId(testUser._id, function (err, cats) {
assert.ifError(err);
assert.ok(cats !== null);
assert.ok(cats instanceof Array);
assert.ok(cats.length === 2);
assert.strictEqual(cats[0].name, testCats[0].name);
assert.strictEqual(cats[1].name, testCats[1].name);
done();
});
});
});
describe('getByIdForUser', function () {
it('should return a category id and name by given cat Id and uid', function (done) {
Category.getByIdForUser(testCats[0]._id, testUser._id, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.ok(category._id.equals(testCats[0]._id));
assert.strictEqual(category.name, testCats[0].name);
assert.ok(!category.user);
assert.ok(!category.notepadsCount);
done();
});
});
});
describe('increaseNotepadsCountById', function () {
it('should increase notepadsCount of a cat with 1 given cat id', function (done) {
Category.increaseNotepadsCountById(testCats[0]._id, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.ok(category._id.equals(testCats[0]._id));
assert.strictEqual(category.notepadsCount, testCats[0].notepadsCount + 1);
testCats[0] = category;
done();
});
});
});
describe('decreaseNotepadsCountById', function () {
it('should decrease notepadsCount of a cat with 1 given cat id', function (done) {
Category.decreaseNotepadsCountById(testCats[0]._id, function (err, category) {
assert.ifError(err);
assert.ok(category !== null);
assert.ok(category instanceof Category);
assert.ok(category._id.equals(testCats[0]._id));
assert.strictEqual(category.notepadsCount, testCats[0].notepadsCount - 1);
testCats[0] = category;
done();
});
});
});
});
| added db.close() in after()
| test/server/models/category_test.js | added db.close() in after() | <ide><path>est/server/models/category_test.js
<ide> var db, testUser, testCats = [];
<ide>
<ide> before(function (done) {
<add> //TODO: use callback
<ide> db = connection();
<del>
<ide> //add some testing data
<ide> User.create({
<ide> facebookId: +new Date(),
<ide> after(function (done) {
<ide> Category.remove({}, function () {
<ide> User.remove({}, function () {
<add> db.close();
<ide> done();
<ide> });
<ide> }); |
|
Java | apache-2.0 | 622ed452062c556623e982818b82e1b2caac0e89 | 0 | stevespringett/dependency-track,stevespringett/dependency-track,stevespringett/dependency-track | /*
* This file is part of Dependency-Track.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencytrack.persistence;
import alpine.Config;
import alpine.event.framework.SingleThreadedEventService;
import alpine.persistence.AlpineQueryManager;
import alpine.persistence.PaginatedResult;
import alpine.resources.AlpineRequest;
import org.apache.commons.lang3.StringUtils;
import org.owasp.dependencytrack.event.IndexEvent;
import org.owasp.dependencytrack.model.Bom;
import org.owasp.dependencytrack.model.Component;
import org.owasp.dependencytrack.model.ComponentMetrics;
import org.owasp.dependencytrack.model.Cwe;
import org.owasp.dependencytrack.model.Dependency;
import org.owasp.dependencytrack.model.Evidence;
import org.owasp.dependencytrack.model.License;
import org.owasp.dependencytrack.model.PortfolioMetrics;
import org.owasp.dependencytrack.model.Project;
import org.owasp.dependencytrack.model.ProjectMetrics;
import org.owasp.dependencytrack.model.ProjectProperty;
import org.owasp.dependencytrack.model.Scan;
import org.owasp.dependencytrack.model.Tag;
import org.owasp.dependencytrack.model.Vulnerability;
import org.owasp.dependencytrack.model.VulnerabilityMetrics;
import javax.jdo.FetchPlan;
import javax.jdo.Query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* This QueryManager provides a concrete extension of {@link AlpineQueryManager} by
* providing methods that operate on the Dependency-Track specific models.
*
* @author Steve Springett
* @since 3.0.0
*/
public class QueryManager extends AlpineQueryManager {
private static final boolean ENFORCE_AUTHORIZATION = Config.getInstance().getPropertyAsBoolean(Config.AlpineKey.ENFORCE_AUTHORIZATION);
/**
* Default constructor.
*/
public QueryManager() {
super();
}
/**
* Constructs a new QueryManager.
* @param request an AlpineRequest object
*/
public QueryManager(final AlpineRequest request) {
super(request);
}
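    // Illustrative sketch (editor's addition, not part of the original class): callers
    // typically construct a QueryManager per unit of work and release it when finished.
    // The close() call is assumed to be inherited from AlpineQueryManager; adapt this to
    // the actual Alpine lifecycle if it differs.
    private static void exampleQueryManagerLifecycle() {
        final QueryManager qm = new QueryManager();
        try {
            // ... perform queries and persistence operations with qm ...
        } finally {
            qm.close();
        }
    }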
/**
 * Returns a list of all projects.
* @return a List of Projects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getProjects() {
final Query query = pm.newQuery(Project.class);
query.setOrdering("name asc");
if (filter != null) {
query.setFilter("name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
 * Returns a project by its name.
* @param name the name of the Project
* @return a Project object, or null if not found
*/
@SuppressWarnings("unchecked")
public Project getProject(String name) {
final Query query = pm.newQuery(Project.class, "name == :name");
final List<Project> result = (List<Project>) query.execute(name);
return result.size() == 0 ? null : result.get(0);
}
/**
* Returns a paginated result of projects by tag.
* @param tag the tag associated with the Project
* @return a List of Projects that contain the tag
*/
public PaginatedResult getProjects(Tag tag) {
final Query query = pm.newQuery(Project.class, "tags.contains(:tag)");
query.setOrdering("name asc");
return execute(query, tag);
}
/**
 * Returns a list of Tag objects that have been resolved. Tags are resolved
 * by querying the database for each tag name. If a tag does not exist,
 * it will be created and returned along with the other resolved tags.
* @param tags a List of Tags to resolve
* @return List of resolved Tags
*/
@SuppressWarnings("unchecked")
public synchronized List<Tag> resolveTags(List<Tag> tags) {
if (tags == null) {
return new ArrayList<>();
}
final List<Tag> resolvedTags = new ArrayList<>();
final List<String> unresolvedTags = new ArrayList<>();
for (Tag tag: tags) {
final String trimmedTag = StringUtils.trimToNull(tag.getName());
if (trimmedTag != null) {
final Tag resolvedTag = getTagByName(trimmedTag);
if (resolvedTag != null) {
resolvedTags.add(resolvedTag);
} else {
unresolvedTags.add(trimmedTag);
}
}
}
resolvedTags.addAll(createTags(unresolvedTags));
return resolvedTags;
}
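    // Illustrative sketch (editor's addition): resolving free-form tag names before
    // attaching them to a project. The tag name below is a made-up placeholder; surrounding
    // whitespace is trimmed during resolution, existing tags are reused and unknown names
    // are created.
    private List<Tag> exampleResolveTags() {
        final Tag unsaved = new Tag();
        unsaved.setName(" internal-app ");
        return resolveTags(Arrays.asList(unsaved));
    }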
/**
 * Returns a Tag object by its name.
* @param name the name of the Tag
* @return a Tag object
*/
@SuppressWarnings("unchecked")
public Tag getTagByName(String name) {
final String trimmedTag = StringUtils.trimToNull(name);
final Query query = pm.newQuery(Tag.class, "name == :name");
final List<Tag> result = (List<Tag>) query.execute(trimmedTag);
return result.size() == 0 ? null : result.get(0);
}
/**
* Creates a new Tag object with the specified name.
* @param name the name of the Tag to create
* @return the created Tag object
*/
public Tag createTag(String name) {
final String trimmedTag = StringUtils.trimToNull(name);
final Tag resolvedTag = getTagByName(trimmedTag);
if (resolvedTag != null) {
return resolvedTag;
}
final Tag tag = new Tag();
tag.setName(trimmedTag);
return persist(tag);
}
/**
* Creates one or more Tag objects from the specified name(s).
* @param names the name(s) of the Tag(s) to create
* @return the created Tag object(s)
*/
public List<Tag> createTags(List<String> names) {
final List<Tag> newTags = new ArrayList<>();
for (String name: names) {
final String trimmedTag = StringUtils.trimToNull(name);
if (getTagByName(trimmedTag) == null) {
final Tag tag = new Tag();
tag.setName(trimmedTag);
newTags.add(tag);
}
}
return new ArrayList<>(persist(newTags));
}
/**
* Creates a new Project.
* @param name the name of the project to create
* @param description a description of the project
* @param version the project version
* @param tags a List of Tags - these will be resolved if necessary
* @param parent an optional parent Project
* @param purl an optional Package URL
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return the created Project
*/
public Project createProject(String name, String description, String version, List<Tag> tags, Project parent, String purl, boolean commitIndex) {
final Project project = new Project();
project.setName(name);
project.setDescription(description);
project.setVersion(version);
project.setTags(resolveTags(tags));
if (parent != null) {
project.setParent(parent);
}
project.setPurl(purl);
final Project result = persist(project);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Project.class);
return result;
}
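    // Illustrative sketch (editor's addition): creating a top-level project with resolved
    // tags. The names, version and Package URL below are made-up placeholders.
    private Project exampleCreateProject() {
        final List<Tag> tags = createTags(Arrays.asList("internal", "web"));
        return createProject("Example App", "Demo project", "1.0.0", tags, null,
                "pkg:maven/org.example/example-app@1.0.0", true);
    }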
/**
* Updates an existing Project.
* @param uuid the uuid of the project to update
* @param name the name of the project
* @param description a description of the project
* @param version the project version
* @param tags a List of Tags - these will be resolved if necessary
* @param purl an optional Package URL
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return the updated Project
*/
public Project updateProject(UUID uuid, String name, String description, String version, List<Tag> tags, String purl, boolean commitIndex) {
final Project project = getObjectByUuid(Project.class, uuid);
project.setName(name);
project.setDescription(description);
project.setVersion(version);
project.setPurl(purl);
List<Tag> resolvedTags = resolveTags(tags);
bind(project, resolvedTags);
final Project result = persist(project);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.UPDATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Project.class);
return result;
}
/**
* Updates the last time a scan was imported.
 * @param p the Project to update
 * @param date the date of the last scan import
* @return the updated Project
*/
public Project updateLastScanImport(Project p, Date date) {
final Project project = getObjectById(Project.class, p.getId());
project.setLastScanImport(date);
return persist(project);
}
/**
* Updates the last time a bom was imported.
 * @param p the Project to update
 * @param date the date of the last bom import
* @return the updated Project
*/
public Project updateLastBomImport(Project p, Date date) {
final Project project = getObjectById(Project.class, p.getId());
project.setLastBomImport(date);
return persist(project);
}
/**
 * Deletes a Project and all objects dependent on the project.
* @param project the Project to delete
*/
public void recursivelyDelete(Project project) {
if (project.getChildren() != null) {
for (Project child: project.getChildren()) {
recursivelyDelete(child);
}
}
pm.getFetchPlan().setDetachmentOptions(FetchPlan.DETACH_LOAD_FIELDS);
final Project result = pm.getObjectById(Project.class, project.getId());
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.DELETE, pm.detachCopy(result)));
deleteMetrics(project);
deleteDependencies(project);
deleteScans(project);
deleteBoms(project);
delete(project.getProperties());
delete(getScans(project));
delete(getBoms(project));
delete(project.getChildren());
delete(project);
}
/**
* Creates a key/value pair (ProjectProperty) for the specified Project.
* @param project the Project to create the property for
* @param key the key of the property
* @param value the value of the property
* @return the created ProjectProperty object
*/
public ProjectProperty createProjectProperty(Project project, String key, String value) {
final ProjectProperty property = new ProjectProperty();
property.setProject(project);
property.setKey(key);
property.setValue(value);
return persist(property);
}
/**
* Creates a new Scan.
* @param project the Project to create a Scan for
* @param executed the Date when the scan was executed
* @param imported the Date when the scan was imported
* @return a new Scan object
*/
public Scan createScan(Project project, Date executed, Date imported) {
final Scan scan = new Scan();
scan.setExecuted(executed);
scan.setImported(imported);
scan.setProject(project);
return persist(scan);
}
/**
* Returns a list of all Scans for the specified Project.
* @param project the Project to retrieve scans for
* @return a List of Scans
*/
@SuppressWarnings("unchecked")
public List<Scan> getScans(Project project) {
final Query query = pm.newQuery(Scan.class, "project == :project");
return (List<Scan>) query.execute(project);
}
/**
* Deletes scans belonging to the specified Project.
* @param project the Project to delete scans for
*/
public void deleteScans(Project project) {
final Query query = pm.newQuery(Scan.class, "project == :project");
query.deletePersistentAll(project);
}
/**
* Deletes scans belonging to the specified Component.
* @param component the Component to delete scans for
*/
@SuppressWarnings("unchecked")
public void deleteScans(Component component) {
final Query query = pm.newQuery(Scan.class, "components.contains(component)");
for (Scan scan: (List<Scan>) query.execute(component)) {
scan.getComponents().remove(component);
persist(scan);
}
}
/**
* Creates a new Bom.
* @param project the Project to create a Bom for
* @param imported the Date when the bom was imported
* @return a new Bom object
*/
public Bom createBom(Project project, Date imported) {
final Bom bom = new Bom();
bom.setImported(imported);
bom.setProject(project);
return persist(bom);
}
/**
 * Returns a list of all Bom objects for the specified Project.
* @param project the Project to retrieve boms for
* @return a List of Boms
*/
@SuppressWarnings("unchecked")
public List<Bom> getBoms(Project project) {
final Query query = pm.newQuery(Bom.class, "project == :project");
return (List<Bom>) query.execute(project);
}
/**
* Deletes boms belonging to the specified Project.
* @param project the Project to delete boms for
*/
public void deleteBoms(Project project) {
final Query query = pm.newQuery(Bom.class, "project == :project");
query.deletePersistentAll(project);
}
/**
* Deletes boms belonging to the specified Component.
* @param component the Component to delete boms for
*/
@SuppressWarnings("unchecked")
public void deleteBoms(Component component) {
final Query query = pm.newQuery(Bom.class, "components.contains(component)");
for (Bom bom: (List<Bom>) query.execute(component)) {
bom.getComponents().remove(component);
persist(bom);
}
}
/**
* Returns a list of all Components defined in the datastore.
* @return a List of Components
*/
@SuppressWarnings("unchecked")
public PaginatedResult getComponents() {
final Query query = pm.newQuery(Component.class);
query.setOrdering("name asc");
if (filter != null) {
query.setFilter("name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
* Returns a Component by its hash. Supports MD5, SHA-1, SHA-256, SHA-512, SHA3-256, and SHA3-512 hashes.
* @param hash the hash of the component to retrieve
* @return a Component, or null if not found
*/
@SuppressWarnings("unchecked")
public Component getComponentByHash(String hash) {
if (hash == null) {
return null;
}
final Query query;
if (hash.length() == 32) {
query = pm.newQuery(Component.class, "md5 == :hash");
} else if (hash.length() == 40) {
query = pm.newQuery(Component.class, "sha1 == :hash");
} else if (hash.length() == 64) {
query = pm.newQuery(Component.class, "sha256 == :hash || sha3_256 == :hash");
} else if (hash.length() == 128) {
query = pm.newQuery(Component.class, "sha512 == :hash || sha3_512 == :hash");
} else {
return null;
}
final List<Component> result = (List<Component>) query.execute(hash);
return result.size() == 0 ? null : result.get(0);
}
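    // Illustrative sketch (editor's addition): the hash length determines which column is
    // queried (32 = MD5, 40 = SHA-1, 64 = SHA-256/SHA3-256, 128 = SHA-512/SHA3-512).
    private boolean exampleComponentExistsForSha1(String sha1) {
        final Component component = getComponentByHash(sha1); // a 40 character value triggers the SHA-1 lookup
        return component != null;
    }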
/**
* Returns a Component by group, name, and version.
* @param group the group of the component to retrieve
* @param name the name of the component to retrieve
* @param version the version of the component to retrieve
* @return a Component, or null if not found
*/
@SuppressWarnings("unchecked")
public Component getComponentByAttributes(String group, String name, String version) {
final Query query = pm.newQuery(Component.class, "group == :group && name == :name && version == :version");
final List<Component> result = (List<Component>) query.execute(group, name, version);
return result.size() == 0 ? null : result.get(0);
}
/**
* Creates a new Component.
* @param component the Component to persist
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a new Component
*/
public Component createComponent(Component component, boolean commitIndex) {
final Component result = persist(component);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Component.class);
return result;
}
/**
 * Updates an existing Component.
* @param transientComponent the component to update
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a Component
*/
public Component updateComponent(Component transientComponent, boolean commitIndex) {
final Component component = getObjectByUuid(Component.class, transientComponent.getUuid());
component.setName(transientComponent.getName());
component.setVersion(transientComponent.getVersion());
component.setGroup(transientComponent.getGroup());
component.setFilename(transientComponent.getFilename());
component.setMd5(transientComponent.getMd5());
component.setSha1(transientComponent.getSha1());
component.setSha256(transientComponent.getSha256());
component.setSha512(transientComponent.getSha512());
component.setSha3_256(transientComponent.getSha3_256());
component.setSha3_512(transientComponent.getSha3_512());
component.setDescription(transientComponent.getDescription());
component.setCopyright(transientComponent.getCopyright());
component.setLicense(transientComponent.getLicense());
component.setResolvedLicense(transientComponent.getResolvedLicense());
component.setParent(transientComponent.getParent());
component.setCpe(transientComponent.getCpe());
component.setPurl(transientComponent.getPurl());
final Component result = persist(component);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.UPDATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Component.class);
return result;
}
/**
 * Deletes a Component and all objects dependent on the component.
* @param component the Component to delete
* @param commitIndex specifies if the search index should be committed (an expensive operation)
*/
public void recursivelyDelete(Component component, boolean commitIndex) {
if (component.getChildren() != null) {
for (Component child: component.getChildren()) {
recursivelyDelete(child, false);
}
}
pm.getFetchPlan().setDetachmentOptions(FetchPlan.DETACH_LOAD_FIELDS);
final Component result = pm.getObjectById(Component.class, component.getId());
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.DELETE, pm.detachCopy(result)));
deleteMetrics(component);
deleteDependencies(component);
deleteScans(component);
deleteBoms(component);
delete(component);
commitSearchIndex(commitIndex, Component.class);
}
/**
* Creates new evidence for a Component.
* @param component the Component to create evidence for
* @param type the type of evidence
* @param confidenceScore the confidence score
* @param source the source of where the evidence was obtained from
* @param name the name of the evidence
* @param value the value of the evidence
* @return a new Evidence object
*/
public Evidence createEvidence(Component component, String type, int confidenceScore,
String source, String name, String value) {
final Evidence evidence = new Evidence();
evidence.setComponent(component);
evidence.setType(type);
evidence.setConfidence(confidenceScore);
evidence.setSource(source);
evidence.setName(name);
evidence.setValue(value);
return persist(evidence);
}
/**
* Returns a List of all License objects.
* @return a List of all License objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getLicenses() {
final Query query = pm.newQuery(License.class);
query.setOrdering("name asc");
if (filter != null) {
query.setFilter("name.toLowerCase().matches(:filter) || licenseId.toLowerCase().matches(:filter)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
* Returns a License object from the specified SPDX license ID.
* @param licenseId the SPDX license ID to retrieve
* @return a License object, or null if not found
*/
@SuppressWarnings("unchecked")
public License getLicense(String licenseId) {
final Query query = pm.newQuery(License.class, "licenseId == :licenseId");
final List<License> result = (List<License>) query.execute(licenseId);
return result.size() == 0 ? null : result.get(0);
}
/**
* Creates a new License.
* @param license the License object to create
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a created License object
*/
public License createLicense(License license, boolean commitIndex) {
final License result = persist(license);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, License.class);
return result;
}
/**
* Creates a new Vulnerability.
* @param vulnerability the vulnerability to persist
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a new vulnerability object
*/
public Vulnerability createVulnerability(Vulnerability vulnerability, boolean commitIndex) {
final Vulnerability result = persist(vulnerability);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Vulnerability.class);
return result;
}
/**
* Updates a vulnerability.
* @param transientVulnerability the vulnerability to update
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a Vulnerability object
*/
public Vulnerability updateVulnerability(Vulnerability transientVulnerability, boolean commitIndex) {
final Vulnerability vulnerability;
if (transientVulnerability.getId() > 0) {
vulnerability = getObjectById(Vulnerability.class, transientVulnerability.getId());
} else {
vulnerability = getVulnerabilityByVulnId(transientVulnerability.getSource(), transientVulnerability.getVulnId());
}
if (vulnerability != null) {
vulnerability.setCreated(transientVulnerability.getCreated());
vulnerability.setPublished(transientVulnerability.getPublished());
vulnerability.setUpdated(transientVulnerability.getUpdated());
vulnerability.setVulnId(transientVulnerability.getVulnId());
vulnerability.setSource(transientVulnerability.getSource());
vulnerability.setCredits(transientVulnerability.getCredits());
vulnerability.setVulnerableVersions(transientVulnerability.getVulnerableVersions());
vulnerability.setPatchedVersions(transientVulnerability.getPatchedVersions());
vulnerability.setDescription(transientVulnerability.getDescription());
vulnerability.setTitle(transientVulnerability.getTitle());
vulnerability.setSubTitle(transientVulnerability.getSubTitle());
vulnerability.setReferences(transientVulnerability.getReferences());
vulnerability.setRecommendation(transientVulnerability.getRecommendation());
vulnerability.setCwe(transientVulnerability.getCwe());
vulnerability.setCvssV2Vector(transientVulnerability.getCvssV2Vector());
vulnerability.setCvssV2BaseScore(transientVulnerability.getCvssV2BaseScore());
vulnerability.setCvssV2ImpactSubScore(transientVulnerability.getCvssV2ImpactSubScore());
vulnerability.setCvssV2ExploitabilitySubScore(transientVulnerability.getCvssV2ExploitabilitySubScore());
vulnerability.setCvssV3Vector(transientVulnerability.getCvssV3Vector());
vulnerability.setCvssV3BaseScore(transientVulnerability.getCvssV3BaseScore());
vulnerability.setCvssV3ImpactSubScore(transientVulnerability.getCvssV3ImpactSubScore());
vulnerability.setCvssV3ExploitabilitySubScore(transientVulnerability.getCvssV3ExploitabilitySubScore());
vulnerability.setMatchedAllPreviousCPE(transientVulnerability.getMatchedAllPreviousCPE());
vulnerability.setMatchedCPE(transientVulnerability.getMatchedCPE());
final Vulnerability result = persist(vulnerability);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.UPDATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Vulnerability.class);
return result;
}
return null;
}
/**
 * Synchronizes a vulnerability. The method first checks to see if the vulnerability already
* exists and if so, updates the vulnerability. If the vulnerability does not already exist,
* this method will create a new vulnerability.
* @param vulnerability the vulnerability to synchronize
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a Vulnerability object
*/
public Vulnerability synchronizeVulnerability(Vulnerability vulnerability, boolean commitIndex) {
Vulnerability result = updateVulnerability(vulnerability, commitIndex);
if (result == null) {
result = createVulnerability(vulnerability, commitIndex);
}
return result;
}
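    // Illustrative sketch (editor's addition): upserting a vulnerability pulled from an
    // external feed. Assumes the JDO model's no-arg constructor and the setters used by
    // updateVulnerability() above; the identifiers below are placeholders.
    private Vulnerability exampleSynchronizeVulnerability() {
        final Vulnerability vuln = new Vulnerability();
        vuln.setSource("NVD");
        vuln.setVulnId("CVE-2017-0001");
        vuln.setDescription("Example description taken from the feed");
        return synchronizeVulnerability(vuln, false);
    }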
/**
 * Returns a vulnerability by its name (i.e. CVE-2017-0001)
* @param vulnId the name of the vulnerability
* @return the matching Vulnerability object, or null if not found
*/
@SuppressWarnings("unchecked")
public Vulnerability getVulnerabilityByVulnId(String vulnId) {
final Query query = pm.newQuery(Vulnerability.class, "vulnId == :vulnId");
query.getFetchPlan().addGroup(Vulnerability.FetchGroup.COMPONENTS.name());
final List<Vulnerability> result = (List<Vulnerability>) query.execute(vulnId);
return result.size() == 0 ? null : result.get(0);
}
/**
 * Returns a vulnerability by its name (i.e. CVE-2017-0001) and source.
* @param source the source of the vulnerability
* @param vulnId the name of the vulnerability
* @return the matching Vulnerability object, or null if not found
*/
@SuppressWarnings("unchecked")
public Vulnerability getVulnerabilityByVulnId(String source, String vulnId) {
final Query query = pm.newQuery(Vulnerability.class, "source == :source && vulnId == :vulnId");
query.getFetchPlan().addGroup(Vulnerability.FetchGroup.COMPONENTS.name());
final List<Vulnerability> result = (List<Vulnerability>) query.execute(source, vulnId);
return result.size() == 0 ? null : result.get(0);
}
/**
* Adds a vulnerability to a component.
 * @param vulnerability the vulnerability to add
 * @param component the component affected by the vulnerability
*/
@SuppressWarnings("unchecked")
public void addVulnerability(Vulnerability vulnerability, Component component) {
vulnerability = getObjectById(Vulnerability.class, vulnerability.getId());
component = getObjectById(Component.class, component.getId());
if (!contains(vulnerability, component)) {
pm.currentTransaction().begin();
component.addVulnerability(vulnerability);
pm.currentTransaction().commit();
}
}
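    // Illustrative sketch (editor's addition): associating a known vulnerability with a
    // component. The association is only added if contains() does not already report it,
    // so repeated calls are safe. The identifiers below are placeholders.
    private void exampleFlagComponent(Component component) {
        final Vulnerability vuln = getVulnerabilityByVulnId("NVD", "CVE-2017-0001");
        if (vuln != null && component != null) {
            addVulnerability(vuln, component);
        }
    }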
/**
* Removes a vulnerability from a component.
 * @param vulnerability the vulnerability to remove
 * @param component the component no longer affected by the vulnerability
*/
@SuppressWarnings("unchecked")
public void removeVulnerability(Vulnerability vulnerability, Component component) {
vulnerability = getObjectById(Vulnerability.class, vulnerability.getId());
component = getObjectById(Component.class, component.getId());
if (contains(vulnerability, component)) {
pm.currentTransaction().begin();
component.removeVulnerability(vulnerability);
pm.currentTransaction().commit();
}
}
/**
* Determines if a Component is affected by a specific Vulnerability by checking
* {@link Vulnerability#getSource()} and {@link Vulnerability#getVulnId()}.
* @param vulnerability The vulnerability to check if associated with component
* @param component The component to check against
* @return true if vulnerability is associated with the component, false if not
*/
public boolean contains(Vulnerability vulnerability, Component component) {
vulnerability = getObjectById(Vulnerability.class, vulnerability.getId());
component = getObjectById(Component.class, component.getId());
for (Vulnerability vuln: component.getVulnerabilities()) {
if (vuln.getSource() != null && vuln.getSource().equals(vulnerability.getSource())
&& vuln.getVulnId() != null && vuln.getVulnId().equals(vulnerability.getVulnId())) {
return true;
}
}
return false;
}
/**
* Checks if the specified CWE id exists or not. If not, creates
* a new CWE with the specified ID and name. In both cases, the
* CWE will be returned.
* @param id the CWE ID
* @param name the name of the CWE
* @return a CWE object
*/
public Cwe createCweIfNotExist(int id, String name) {
Cwe cwe = getCweById(id);
if (cwe != null) {
return cwe;
}
cwe = new Cwe();
cwe.setCweId(id);
cwe.setName(name);
return persist(cwe);
}
/**
 * Returns a CWE by its CWE-ID.
* @param cweId the CWE-ID
* @return a CWE object, or null if not found
*/
@SuppressWarnings("unchecked")
public Cwe getCweById(int cweId) {
final Query query = pm.newQuery(Cwe.class, "cweId == :cweId");
final List<Cwe> result = (List<Cwe>) query.execute(cweId);
return result.size() == 0 ? null : result.get(0);
}
/**
 * Returns a complete list of all CWEs.
* @return a List of CWEs
*/
@SuppressWarnings("unchecked")
public PaginatedResult getCwes() {
final Query query = pm.newQuery(Cwe.class);
query.setOrdering("id asc");
if (filter != null) {
query.setFilter("cweId == :cweId || name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filter, filterString);
}
return execute(query);
}
/**
* Checks if the specified Dependency exists or not. If not, creates
* a new Dependency with the specified project and component. In both
* cases, the Dependency will be returned.
* @param project the Project
* @param component the Component
* @param addedBy optional string representation of a username
* @param notes any notes on why the dependency exists or its usage
* @return a Dependency object
*/
public Dependency createDependencyIfNotExist(Project project, Component component, String addedBy, String notes) {
Dependency dependency = getDependency(project, component);
if (dependency != null) {
return dependency;
}
dependency = new Dependency();
dependency.setProject(project);
dependency.setComponent(component);
dependency.setAddedBy(addedBy);
dependency.setAddedOn(new Date());
dependency.setNotes(notes);
return persist(dependency);
}
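    // Illustrative sketch (editor's addition): linking an existing component to an existing
    // project as a dependency. The lookup values and note text below are placeholders.
    private Dependency exampleAddDependency() {
        final Project project = getProject("Example App");
        final Component component = getComponentByAttributes("org.example", "example-lib", "2.1.0");
        if (project == null || component == null) {
            return null;
        }
        return createDependencyIfNotExist(project, component, "admin", "Introduced via BOM import");
    }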
/**
* Checks if the specified Dependency exists or not. If so, removes
* the component as a dependency of the project.
* @param project the Project
* @param component the Component
*/
public void removeDependencyIfExist(Project project, Component component) {
Dependency dependency = getDependency(project, component);
if (dependency != null) {
delete(dependency);
}
}
/**
 * Returns a List of all Dependency objects for the specified Project.
 * This method is designed NOT to provide paginated results.
* @param project the Project to retrieve dependencies of
* @return a List of Dependency objects
*/
@SuppressWarnings("unchecked")
public List<Dependency> getAllDependencies(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
query.getFetchPlan().addGroup(Dependency.FetchGroup.COMPONENT_ONLY.name());
query.setOrdering("component.name asc");
return (List<Dependency>)query.execute(project);
}
/**
* Returns a List of Dependency for the specified Project.
* @param project the Project to retrieve dependencies of
* @return a List of Dependency objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getDependencies(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
query.getFetchPlan().addGroup(Dependency.FetchGroup.COMPONENT_ONLY.name());
query.setOrdering("component.name asc");
if (filter != null) {
query.setFilter("component.name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query, project);
}
/**
* Returns a List of Dependency for the specified Component.
* @param component the Component to retrieve dependencies of
* @return a List of Dependency objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getDependencies(Component component) {
final Query query = pm.newQuery(Dependency.class, "component == :component");
query.setOrdering("id asc");
query.getFetchPlan().addGroup(Dependency.FetchGroup.PROJECT_ONLY.name());
return execute(query, component);
}
/**
* Deletes all dependencies for the specified Project.
* @param project the Project to delete dependencies of
*/
@SuppressWarnings("unchecked")
public void deleteDependencies(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
query.getFetchPlan().addGroup(Dependency.FetchGroup.PROJECT_ONLY.name());
query.deletePersistentAll(project);
}
/**
* Deletes all dependencies for the specified Component.
* @param component the Component to delete dependencies of
*/
@SuppressWarnings("unchecked")
public void deleteDependencies(Component component) {
final Query query = pm.newQuery(Dependency.class, "component == :component");
query.getFetchPlan().addGroup(Dependency.FetchGroup.COMPONENT_ONLY.name());
query.deletePersistentAll(component);
}
/**
* Returns the number of Dependency objects for the specified Project.
* @param project the Project to retrieve dependencies of
* @return the total number of dependencies for the project
*/
@SuppressWarnings("unchecked")
public long getDependencyCount(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
return getCount(query, project);
}
/**
* Returns the number of Dependency objects for the specified Component.
* @param component the Component to retrieve dependencies of
* @return the total number of dependencies for the component
*/
@SuppressWarnings("unchecked")
public long getDependencyCount(Component component) {
final Query query = pm.newQuery(Dependency.class, "component == :component");
return getCount(query, component);
}
/**
* Returns a Dependency for the specified Project and Component.
* @param project the Project the component is part of
* @param component the Component
* @return a Dependency object, or null if not found
*/
@SuppressWarnings("unchecked")
public Dependency getDependency(Project project, Component component) {
final Query query = pm.newQuery(Dependency.class, "project == :project && component == :component");
query.getFetchPlan().addGroup(Dependency.FetchGroup.ALL.name());
final List<Dependency> result = (List<Dependency>) query.execute(project, component);
return result.size() == 0 ? null : result.get(0);
}
/**
 * Returns the total number of Vulnerability objects.
 * @return the total number of vulnerabilities
*/
@SuppressWarnings("unchecked")
public long getVulnerabilityCount() {
final Query query = pm.newQuery(Vulnerability.class);
return getCount(query);
}
/**
* Returns a List of all Vulnerabilities.
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getVulnerabilities() {
final Query query = pm.newQuery(Vulnerability.class);
query.setOrdering("id asc");
if (filter != null) {
query.setFilter("vulnId.toLowerCase().matches(:vulnId)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
* Returns the number of Vulnerability objects for the specified Component.
* @param component the Component to retrieve vulnerabilities of
* @return the total number of vulnerabilities for the component
*/
@SuppressWarnings("unchecked")
public long getVulnerabilityCount(Component component) {
final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
return getCount(query, component);
}
/**
* Returns a List of Vulnerability for the specified Component.
* @param component the Component to retrieve vulnerabilities of
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getVulnerabilities(Component component) {
final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
query.setOrdering("id asc");
return execute(query, component);
}
/**
* Returns a List of Vulnerability for the specified Component.
     * This method is designed NOT to provide paginated results.
* @param component the Component to retrieve vulnerabilities of
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public List<Vulnerability> getAllVulnerabilities(Component component) {
final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
return (List<Vulnerability>)query.execute(component);
}
/**
* Returns the number of Vulnerability objects for the specified Project.
* @param project the Project to retrieve vulnerabilities of
* @return the total number of vulnerabilities for the project
*/
@SuppressWarnings("unchecked")
public long getVulnerabilityCount(Project project) {
long total = 0;
final List<Dependency> dependencies = getDependencies(project).getList(Dependency.class);
for (Dependency dependency: dependencies) {
total += getVulnerabilityCount(dependency.getComponent());
}
return total;
}
/**
* Returns a List of Vulnerability for the specified Project.
* This method is unique and used by third-party integrations
* such as ThreadFix for the retrieval of vulnerabilities from
* a specific project along with the affected component(s).
* @param project the Project to retrieve vulnerabilities of
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public List<Vulnerability> getVulnerabilities(Project project) {
final List<Vulnerability> vulnerabilities = new ArrayList<>();
final List<Dependency> dependencies = getAllDependencies(project);
for (Dependency dependency: dependencies) {
final Collection<Vulnerability> componentVulns = pm.detachCopyAll(
getAllVulnerabilities(dependency.getComponent())
);
for (Vulnerability componentVuln: componentVulns) {
componentVuln.setComponents(Arrays.asList(pm.detachCopy(dependency.getComponent())));
}
vulnerabilities.addAll(componentVulns);
}
return vulnerabilities;
}
/**
* Returns a List of Projects affected by a specific vulnerability.
* @param vulnerability the vulnerability to query on
* @return a List of Projects
*/
@SuppressWarnings("unchecked")
public List<Project> getProjects(Vulnerability vulnerability) {
final List<Project> projects = new ArrayList<>();
for (Component component: vulnerability.getComponents()) {
for (Dependency dependency: getDependencies(component).getList(Dependency.class)) {
projects.add(dependency.getProject());
}
}
return projects;
}
/**
* Retrieves the current VulnerabilityMetrics
* @return a VulnerabilityMetrics object
*/
@SuppressWarnings("unchecked")
public List<VulnerabilityMetrics> getVulnerabilityMetrics() {
final Query query = pm.newQuery(VulnerabilityMetrics.class);
query.setOrdering("year asc, month asc");
return execute(query).getList(VulnerabilityMetrics.class);
}
/**
* Retrieves the most recent PortfolioMetrics.
* @return a PortfolioMetrics object
*/
@SuppressWarnings("unchecked")
public PortfolioMetrics getMostRecentPortfolioMetrics() {
final Query query = pm.newQuery(PortfolioMetrics.class);
query.setOrdering("lastOccurrence desc");
final List<PortfolioMetrics> result = execute(query).getList(PortfolioMetrics.class);
return result.size() == 0 ? null : result.get(0);
}
/**
* Retrieves PortfolioMetrics in descending order starting with the most recent.
* @return a PaginatedResult object
*/
@SuppressWarnings("unchecked")
public PaginatedResult getPortfolioMetrics() {
final Query query = pm.newQuery(PortfolioMetrics.class);
query.setOrdering("lastOccurrence desc");
return execute(query);
}
/**
* Retrieves PortfolioMetrics in ascending order starting with the oldest since the date specified.
* @return a List of metrics
*/
@SuppressWarnings("unchecked")
public List<PortfolioMetrics> getPortfolioMetricsSince(Date since) {
final Query query = pm.newQuery(PortfolioMetrics.class, "lastOccurrence >= :since");
query.setOrdering("lastOccurrence asc");
return (List<PortfolioMetrics>)query.execute(since);
}
/**
* Retrieves the most recent ProjectMetrics.
* @param project the Project to retrieve metrics for
* @return a ProjectMetrics object
*/
@SuppressWarnings("unchecked")
public ProjectMetrics getMostRecentProjectMetrics(Project project) {
final Query query = pm.newQuery(ProjectMetrics.class, "project == :project");
query.setOrdering("lastOccurrence desc");
final List<ProjectMetrics> result = execute(query, project).getList(ProjectMetrics.class);
return result.size() == 0 ? null : result.get(0);
}
/**
* Retrieves ProjectMetrics in descending order starting with the most recent.
* @param project the Project to retrieve metrics for
* @return a PaginatedResult object
*/
@SuppressWarnings("unchecked")
public PaginatedResult getProjectMetrics(Project project) {
final Query query = pm.newQuery(ProjectMetrics.class, "project == :project");
query.setOrdering("lastOccurrence desc");
return execute(query, project);
}
/**
* Retrieves ProjectMetrics in ascending order starting with the oldest since the date specified.
* @return a List of metrics
*/
@SuppressWarnings("unchecked")
public List<ProjectMetrics> getProjectMetricsSince(Project project, Date since) {
final Query query = pm.newQuery(PortfolioMetrics.class, "project == :project && lastOccurrence >= :since");
query.setOrdering("lastOccurrence asc");
return (List<ProjectMetrics>)query.execute(project, since);
}
/**
* Retrieves the most recent ComponentMetrics.
* @param component the Component to retrieve metrics for
* @return a ComponentMetrics object
*/
@SuppressWarnings("unchecked")
public ComponentMetrics getMostRecentComponentMetrics(Component component) {
final Query query = pm.newQuery(ComponentMetrics.class, "component == :component");
query.setOrdering("lastOccurrence desc");
final List<ComponentMetrics> result = execute(query, component).getList(ComponentMetrics.class);
return result.size() == 0 ? null : result.get(0);
}
/**
* Retrieves ComponentMetrics in descending order starting with the most recent.
* @param component the Component to retrieve metrics for
* @return a PaginatedResult object
*/
@SuppressWarnings("unchecked")
public PaginatedResult getComponentMetrics(Component component) {
final Query query = pm.newQuery(ComponentMetrics.class, "component == :component");
query.setOrdering("lastOccurrence desc");
return execute(query, component);
}
/**
* Retrieves ComponentMetrics in ascending order starting with the oldest since the date specified.
* @return a List of metrics
*/
@SuppressWarnings("unchecked")
public List<ComponentMetrics> getComponentMetricsSince(Component component, Date since) {
final Query query = pm.newQuery(PortfolioMetrics.class, "component == :component && lastOccurrence >= :since");
query.setOrdering("lastOccurrence asc");
return (List<ComponentMetrics>)query.execute(component, since);
}
/**
* Synchronizes VulnerabilityMetrics.
*/
public void synchronizeVulnerabilityMetrics(VulnerabilityMetrics metric) {
final Query query;
final List<VulnerabilityMetrics> result;
if (metric.getMonth() == null) {
query = pm.newQuery(VulnerabilityMetrics.class, "year == :year && month == null");
result = execute(query, metric.getYear()).getList(VulnerabilityMetrics.class);
} else {
query = pm.newQuery(VulnerabilityMetrics.class, "year == :year && month == :month");
result = execute(query, metric.getYear(), metric.getMonth()).getList(VulnerabilityMetrics.class);
}
if (result.size() == 1) {
VulnerabilityMetrics m = result.get(0);
m.setCount(metric.getCount());
m.setMeasuredAt(metric.getMeasuredAt());
persist(m);
} else if (result.size() == 0) {
persist(metric);
} else {
delete(result);
persist(metric);
}
}
/**
     * Deletes all metrics associated with the specified Project.
* @param project the Project to delete metrics for
*/
public void deleteMetrics(Project project) {
final Query query = pm.newQuery(ProjectMetrics.class, "project == :project");
query.deletePersistentAll(project);
}
/**
     * Deletes all metrics associated with the specified Component.
* @param component the Component to delete metrics for
*/
public void deleteMetrics(Component component) {
final Query query = pm.newQuery(ComponentMetrics.class, "component == :component");
query.deletePersistentAll(component);
}
/**
* Binds the two objects together in a corresponding join table.
* @param project a Project object
* @param tags a List of Tag objects
*/
@SuppressWarnings("unchecked")
public void bind(Project project, List<Tag> tags) {
final Query query = pm.newQuery(Tag.class, "projects.contains(:project)");
List<Tag> currentProjectTags = (List<Tag>)query.execute(project);
pm.currentTransaction().begin();
for (Tag tag: currentProjectTags) {
if (!tags.contains(tag)) {
tag.getProjects().remove(project);
}
}
project.setTags(tags);
for (Tag tag: tags) {
tag.getProjects().add(project);
}
pm.currentTransaction().commit();
}
/**
* Binds the two objects together in a corresponding join table.
* @param scan a Scan object
* @param component a Component object
*/
public void bind(Scan scan, Component component) {
        boolean bound = scan.getComponents().stream().anyMatch(s -> s.getId() == component.getId());
if (!bound) {
pm.currentTransaction().begin();
scan.getComponents().add(component);
component.getScans().add(scan);
pm.currentTransaction().commit();
}
}
/**
* Binds the two objects together in a corresponding join table.
* @param bom a Bom object
* @param component a Component object
*/
public void bind(Bom bom, Component component) {
        boolean bound = bom.getComponents().stream().anyMatch(b -> b.getId() == component.getId());
if (!bound) {
pm.currentTransaction().begin();
bom.getComponents().add(component);
component.getBoms().add(bom);
pm.currentTransaction().commit();
}
}
/**
* Binds the two objects together in a corresponding join table.
* @param component a Component object
* @param vulnerability a Vulnerability object
*/
public void bind(Component component, Vulnerability vulnerability) {
boolean bound = vulnerability.getComponents().stream().anyMatch(c -> c.getId() == component.getId());
if (!bound) {
pm.currentTransaction().begin();
vulnerability.getComponents().add(component);
component.getVulnerabilities().add(vulnerability);
pm.currentTransaction().commit();
}
}
/**
     * Commits the Lucene index.
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @param clazz the indexable class to commit the index of
*/
public void commitSearchIndex(boolean commitIndex, Class clazz) {
if (commitIndex) {
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.COMMIT, clazz));
}
}
/**
     * Commits the Lucene index.
* @param clazz the indexable class to commit the index of
*/
public void commitSearchIndex(Class clazz) {
commitSearchIndex(true, clazz);
}
}
| src/main/java/org/owasp/dependencytrack/persistence/QueryManager.java | /*
* This file is part of Dependency-Track.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) Steve Springett. All Rights Reserved.
*/
package org.owasp.dependencytrack.persistence;
import alpine.Config;
import alpine.event.framework.SingleThreadedEventService;
import alpine.persistence.AlpineQueryManager;
import alpine.persistence.PaginatedResult;
import alpine.resources.AlpineRequest;
import org.apache.commons.lang3.StringUtils;
import org.owasp.dependencytrack.event.IndexEvent;
import org.owasp.dependencytrack.model.Bom;
import org.owasp.dependencytrack.model.Component;
import org.owasp.dependencytrack.model.ComponentMetrics;
import org.owasp.dependencytrack.model.Cwe;
import org.owasp.dependencytrack.model.Dependency;
import org.owasp.dependencytrack.model.Evidence;
import org.owasp.dependencytrack.model.License;
import org.owasp.dependencytrack.model.PortfolioMetrics;
import org.owasp.dependencytrack.model.Project;
import org.owasp.dependencytrack.model.ProjectMetrics;
import org.owasp.dependencytrack.model.ProjectProperty;
import org.owasp.dependencytrack.model.Scan;
import org.owasp.dependencytrack.model.Tag;
import org.owasp.dependencytrack.model.Vulnerability;
import org.owasp.dependencytrack.model.VulnerabilityMetrics;
import javax.jdo.FetchPlan;
import javax.jdo.Query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* This QueryManager provides a concrete extension of {@link AlpineQueryManager} by
* providing methods that operate on the Dependency-Track specific models.
*
* @author Steve Springett
* @since 3.0.0
*/
public class QueryManager extends AlpineQueryManager {
private static final boolean ENFORCE_AUTHORIZATION = Config.getInstance().getPropertyAsBoolean(Config.AlpineKey.ENFORCE_AUTHORIZATION);
/**
* Default constructor.
*/
public QueryManager() {
super();
}
/**
* Constructs a new QueryManager.
* @param request an AlpineRequest object
*/
public QueryManager(final AlpineRequest request) {
super(request);
}
/**
     * Returns a list of all projects.
* @return a List of Projects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getProjects() {
final Query query = pm.newQuery(Project.class);
query.setOrdering("name asc");
if (filter != null) {
query.setFilter("name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
     * Returns a project by its name.
* @param name the name of the Project
* @return a Project object, or null if not found
*/
@SuppressWarnings("unchecked")
public Project getProject(String name) {
final Query query = pm.newQuery(Project.class, "name == :name");
final List<Project> result = (List<Project>) query.execute(name);
return result.size() == 0 ? null : result.get(0);
}
/**
* Returns a paginated result of projects by tag.
* @param tag the tag associated with the Project
* @return a List of Projects that contain the tag
*/
public PaginatedResult getProjects(Tag tag) {
final Query query = pm.newQuery(Project.class, "tags.contains(:tag)");
query.setOrdering("name asc");
return execute(query, tag);
}
/**
     * Returns a list of Tag objects that have been resolved. It resolves
     * tags by querying the database to retrieve the tag. If the tag does
* not exist, the tag will be created and returned with other resolved
* tags.
* @param tags a List of Tags to resolve
* @return List of resolved Tags
*/
@SuppressWarnings("unchecked")
public synchronized List<Tag> resolveTags(List<Tag> tags) {
if (tags == null) {
return new ArrayList<>();
}
final List<Tag> resolvedTags = new ArrayList<>();
final List<String> unresolvedTags = new ArrayList<>();
for (Tag tag: tags) {
final String trimmedTag = StringUtils.trimToNull(tag.getName());
if (trimmedTag != null) {
final Tag resolvedTag = getTagByName(trimmedTag);
if (resolvedTag != null) {
resolvedTags.add(resolvedTag);
} else {
unresolvedTags.add(trimmedTag);
}
}
}
resolvedTags.addAll(createTags(unresolvedTags));
return resolvedTags;
}
/**
     * Returns a Tag object by name.
* @param name the name of the Tag
* @return a Tag object
*/
@SuppressWarnings("unchecked")
public Tag getTagByName(String name) {
final String trimmedTag = StringUtils.trimToNull(name);
final Query query = pm.newQuery(Tag.class, "name == :name");
final List<Tag> result = (List<Tag>) query.execute(trimmedTag);
return result.size() == 0 ? null : result.get(0);
}
/**
* Creates a new Tag object with the specified name.
* @param name the name of the Tag to create
* @return the created Tag object
*/
public Tag createTag(String name) {
final String trimmedTag = StringUtils.trimToNull(name);
final Tag resolvedTag = getTagByName(trimmedTag);
if (resolvedTag != null) {
return resolvedTag;
}
final Tag tag = new Tag();
tag.setName(trimmedTag);
return persist(tag);
}
/**
* Creates one or more Tag objects from the specified name(s).
* @param names the name(s) of the Tag(s) to create
* @return the created Tag object(s)
*/
public List<Tag> createTags(List<String> names) {
final List<Tag> newTags = new ArrayList<>();
for (String name: names) {
final String trimmedTag = StringUtils.trimToNull(name);
if (getTagByName(trimmedTag) == null) {
final Tag tag = new Tag();
tag.setName(trimmedTag);
newTags.add(tag);
}
}
return new ArrayList<>(persist(newTags));
}
/**
* Creates a new Project.
* @param name the name of the project to create
* @param description a description of the project
* @param version the project version
* @param tags a List of Tags - these will be resolved if necessary
* @param parent an optional parent Project
* @param purl an optional Package URL
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return the created Project
*/
public Project createProject(String name, String description, String version, List<Tag> tags, Project parent, String purl, boolean commitIndex) {
final Project project = new Project();
project.setName(name);
project.setDescription(description);
project.setVersion(version);
project.setTags(resolveTags(tags));
if (parent != null) {
project.setParent(parent);
}
project.setPurl(purl);
final Project result = persist(project);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Project.class);
return result;
}
/**
* Updates an existing Project.
* @param uuid the uuid of the project to update
* @param name the name of the project
* @param description a description of the project
* @param version the project version
* @param tags a List of Tags - these will be resolved if necessary
* @param purl an optional Package URL
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return the updated Project
*/
public Project updateProject(UUID uuid, String name, String description, String version, List<Tag> tags, String purl, boolean commitIndex) {
final Project project = getObjectByUuid(Project.class, uuid);
project.setName(name);
project.setDescription(description);
project.setVersion(version);
project.setPurl(purl);
List<Tag> resolvedTags = resolveTags(tags);
bind(project, resolvedTags);
final Project result = persist(project);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.UPDATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Project.class);
return result;
}
/**
* Updates the last time a scan was imported.
* @param date the date of the last scan import
* @return the updated Project
*/
public Project updateLastScanImport(Project p, Date date) {
final Project project = getObjectById(Project.class, p.getId());
project.setLastScanImport(date);
return persist(project);
}
/**
* Updates the last time a bom was imported.
* @param date the date of the last bom import
* @return the updated Project
*/
public Project updateLastBomImport(Project p, Date date) {
final Project project = getObjectById(Project.class, p.getId());
project.setLastBomImport(date);
return persist(project);
}
/**
     * Deletes a Project and all objects dependent on the project.
* @param project the Project to delete
*/
public void recursivelyDelete(Project project) {
if (project.getChildren() != null) {
for (Project child: project.getChildren()) {
recursivelyDelete(child);
}
}
pm.getFetchPlan().setDetachmentOptions(FetchPlan.DETACH_LOAD_FIELDS);
final Project result = pm.getObjectById(Project.class, project.getId());
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.DELETE, pm.detachCopy(result)));
deleteMetrics(project);
deleteDependencies(project);
deleteScans(project);
deleteBoms(project);
delete(project.getProperties());
delete(getScans(project));
delete(getBoms(project));
delete(project.getChildren());
delete(project);
}
/**
* Creates a key/value pair (ProjectProperty) for the specified Project.
* @param project the Project to create the property for
* @param key the key of the property
* @param value the value of the property
* @return the created ProjectProperty object
*/
public ProjectProperty createProjectProperty(Project project, String key, String value) {
final ProjectProperty property = new ProjectProperty();
property.setProject(project);
property.setKey(key);
property.setValue(value);
return persist(property);
}
/**
* Creates a new Scan.
* @param project the Project to create a Scan for
* @param executed the Date when the scan was executed
* @param imported the Date when the scan was imported
* @return a new Scan object
*/
public Scan createScan(Project project, Date executed, Date imported) {
final Scan scan = new Scan();
scan.setExecuted(executed);
scan.setImported(imported);
scan.setProject(project);
return persist(scan);
}
/**
* Returns a list of all Scans for the specified Project.
* @param project the Project to retrieve scans for
* @return a List of Scans
*/
@SuppressWarnings("unchecked")
public List<Scan> getScans(Project project) {
final Query query = pm.newQuery(Scan.class, "project == :project");
return (List<Scan>) query.execute(project);
}
/**
* Deletes scans belonging to the specified Project.
* @param project the Project to delete scans for
*/
public void deleteScans(Project project) {
final Query query = pm.newQuery(Scan.class, "project == :project");
query.deletePersistentAll(project);
}
/**
* Deletes scans belonging to the specified Component.
* @param component the Component to delete scans for
*/
@SuppressWarnings("unchecked")
public void deleteScans(Component component) {
final Query query = pm.newQuery(Scan.class, "components.contains(component)");
for (Scan scan: (List<Scan>) query.execute(component)) {
scan.getComponents().remove(component);
persist(scan);
}
}
/**
* Creates a new Bom.
* @param project the Project to create a Bom for
* @param imported the Date when the bom was imported
* @return a new Bom object
*/
public Bom createBom(Project project, Date imported) {
final Bom bom = new Bom();
bom.setImported(imported);
bom.setProject(project);
return persist(bom);
}
/**
* Returns a list of all Bom for the specified Project.
* @param project the Project to retrieve boms for
* @return a List of Boms
*/
@SuppressWarnings("unchecked")
public List<Bom> getBoms(Project project) {
final Query query = pm.newQuery(Bom.class, "project == :project");
return (List<Bom>) query.execute(project);
}
/**
* Deletes boms belonging to the specified Project.
* @param project the Project to delete boms for
*/
public void deleteBoms(Project project) {
final Query query = pm.newQuery(Bom.class, "project == :project");
query.deletePersistentAll(project);
}
/**
* Deletes boms belonging to the specified Component.
* @param component the Component to delete boms for
*/
@SuppressWarnings("unchecked")
public void deleteBoms(Component component) {
final Query query = pm.newQuery(Bom.class, "components.contains(component)");
for (Bom bom: (List<Bom>) query.execute(component)) {
bom.getComponents().remove(component);
persist(bom);
}
}
/**
* Returns a list of all Components defined in the datastore.
* @return a List of Components
*/
@SuppressWarnings("unchecked")
public PaginatedResult getComponents() {
final Query query = pm.newQuery(Component.class);
query.setOrdering("name asc");
if (filter != null) {
query.setFilter("name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
* Returns a Component by its hash. Supports MD5, SHA-1, SHA-256, SHA-512, SHA3-256, and SHA3-512 hashes.
* @param hash the hash of the component to retrieve
* @return a Component, or null if not found
*/
@SuppressWarnings("unchecked")
public Component getComponentByHash(String hash) {
if (hash == null) {
return null;
}
final Query query;
if (hash.length() == 32) {
query = pm.newQuery(Component.class, "md5 == :hash");
} else if (hash.length() == 40) {
query = pm.newQuery(Component.class, "sha1 == :hash");
} else if (hash.length() == 64) {
query = pm.newQuery(Component.class, "sha256 == :hash || sha3_256 == :hash");
} else if (hash.length() == 128) {
query = pm.newQuery(Component.class, "sha512 == :hash || sha3_512 == :hash");
} else {
return null;
}
final List<Component> result = (List<Component>) query.execute(hash);
return result.size() == 0 ? null : result.get(0);
}
/**
* Returns a Component by group, name, and version.
* @param group the group of the component to retrieve
* @param name the name of the component to retrieve
* @param version the version of the component to retrieve
* @return a Component, or null if not found
*/
@SuppressWarnings("unchecked")
public Component getComponentByAttributes(String group, String name, String version) {
final Query query = pm.newQuery(Component.class, "group == :group && name == :name && version == :version");
final List<Component> result = (List<Component>) query.execute(group, name, version);
return result.size() == 0 ? null : result.get(0);
}
/**
* Creates a new Component.
* @param component the Component to persist
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a new Component
*/
public Component createComponent(Component component, boolean commitIndex) {
final Component result = persist(component);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Component.class);
return result;
}
/**
     * Updates an existing Component.
* @param transientComponent the component to update
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a Component
*/
public Component updateComponent(Component transientComponent, boolean commitIndex) {
final Component component = getObjectByUuid(Component.class, transientComponent.getUuid());
component.setName(transientComponent.getName());
component.setVersion(transientComponent.getVersion());
component.setGroup(transientComponent.getGroup());
component.setFilename(transientComponent.getFilename());
component.setMd5(transientComponent.getMd5());
component.setSha1(transientComponent.getSha1());
component.setSha256(transientComponent.getSha256());
component.setSha512(transientComponent.getSha512());
component.setSha3_256(transientComponent.getSha3_256());
component.setSha3_512(transientComponent.getSha3_512());
component.setDescription(transientComponent.getDescription());
component.setCopyright(transientComponent.getCopyright());
component.setLicense(transientComponent.getLicense());
component.setResolvedLicense(transientComponent.getResolvedLicense());
component.setParent(transientComponent.getParent());
component.setCpe(transientComponent.getCpe());
component.setPurl(transientComponent.getPurl());
final Component result = persist(component);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.UPDATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Component.class);
return result;
}
/**
     * Deletes a Component and all objects dependent on the component.
* @param component the Component to delete
* @param commitIndex specifies if the search index should be committed (an expensive operation)
*/
public void recursivelyDelete(Component component, boolean commitIndex) {
if (component.getChildren() != null) {
for (Component child: component.getChildren()) {
recursivelyDelete(child, false);
}
}
pm.getFetchPlan().setDetachmentOptions(FetchPlan.DETACH_LOAD_FIELDS);
final Component result = pm.getObjectById(Component.class, component.getId());
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.DELETE, pm.detachCopy(result)));
deleteMetrics(component);
deleteDependencies(component);
deleteScans(component);
deleteBoms(component);
delete(component);
commitSearchIndex(commitIndex, Component.class);
}
/**
* Creates new evidence for a Component.
* @param component the Component to create evidence for
* @param type the type of evidence
* @param confidenceScore the confidence score
* @param source the source of where the evidence was obtained from
* @param name the name of the evidence
* @param value the value of the evidence
* @return a new Evidence object
*/
public Evidence createEvidence(Component component, String type, int confidenceScore,
String source, String name, String value) {
final Evidence evidence = new Evidence();
evidence.setComponent(component);
evidence.setType(type);
evidence.setConfidence(confidenceScore);
evidence.setSource(source);
evidence.setName(name);
evidence.setValue(value);
return persist(evidence);
}
/**
* Returns a List of all License objects.
* @return a List of all License objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getLicenses() {
final Query query = pm.newQuery(License.class);
query.setOrdering("name asc");
if (filter != null) {
query.setFilter("name.toLowerCase().matches(:filter) || licenseId.toLowerCase().matches(:filter)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
* Returns a License object from the specified SPDX license ID.
* @param licenseId the SPDX license ID to retrieve
* @return a License object, or null if not found
*/
@SuppressWarnings("unchecked")
public License getLicense(String licenseId) {
final Query query = pm.newQuery(License.class, "licenseId == :licenseId");
final List<License> result = (List<License>) query.execute(licenseId);
return result.size() == 0 ? null : result.get(0);
}
/**
* Creates a new License.
* @param license the License object to create
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a created License object
*/
public License createLicense(License license, boolean commitIndex) {
final License result = persist(license);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, License.class);
return result;
}
/**
* Creates a new Vulnerability.
* @param vulnerability the vulnerability to persist
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a new vulnerability object
*/
public Vulnerability createVulnerability(Vulnerability vulnerability, boolean commitIndex) {
final Vulnerability result = persist(vulnerability);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.CREATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Vulnerability.class);
return result;
}
/**
* Updates a vulnerability.
* @param transientVulnerability the vulnerability to update
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a Vulnerability object
*/
public Vulnerability updateVulnerability(Vulnerability transientVulnerability, boolean commitIndex) {
final Vulnerability vulnerability;
if (transientVulnerability.getId() > 0) {
vulnerability = getObjectById(Vulnerability.class, transientVulnerability.getId());
} else {
vulnerability = getVulnerabilityByVulnId(transientVulnerability.getSource(), transientVulnerability.getVulnId());
}
if (vulnerability != null) {
vulnerability.setCreated(transientVulnerability.getCreated());
vulnerability.setPublished(transientVulnerability.getPublished());
vulnerability.setUpdated(transientVulnerability.getUpdated());
vulnerability.setVulnId(transientVulnerability.getVulnId());
vulnerability.setSource(transientVulnerability.getSource());
vulnerability.setCredits(transientVulnerability.getCredits());
vulnerability.setVulnerableVersions(transientVulnerability.getVulnerableVersions());
vulnerability.setPatchedVersions(transientVulnerability.getPatchedVersions());
vulnerability.setDescription(transientVulnerability.getDescription());
vulnerability.setTitle(transientVulnerability.getTitle());
vulnerability.setSubTitle(transientVulnerability.getSubTitle());
vulnerability.setReferences(transientVulnerability.getReferences());
vulnerability.setRecommendation(transientVulnerability.getRecommendation());
vulnerability.setCwe(transientVulnerability.getCwe());
vulnerability.setCvssV2Vector(transientVulnerability.getCvssV2Vector());
vulnerability.setCvssV2BaseScore(transientVulnerability.getCvssV2BaseScore());
vulnerability.setCvssV2ImpactSubScore(transientVulnerability.getCvssV2ImpactSubScore());
vulnerability.setCvssV2ExploitabilitySubScore(transientVulnerability.getCvssV2ExploitabilitySubScore());
vulnerability.setCvssV3Vector(transientVulnerability.getCvssV3Vector());
vulnerability.setCvssV3BaseScore(transientVulnerability.getCvssV3BaseScore());
vulnerability.setCvssV3ImpactSubScore(transientVulnerability.getCvssV3ImpactSubScore());
vulnerability.setCvssV3ExploitabilitySubScore(transientVulnerability.getCvssV3ExploitabilitySubScore());
vulnerability.setMatchedAllPreviousCPE(transientVulnerability.getMatchedAllPreviousCPE());
vulnerability.setMatchedCPE(transientVulnerability.getMatchedCPE());
final Vulnerability result = persist(vulnerability);
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.UPDATE, pm.detachCopy(result)));
commitSearchIndex(commitIndex, Vulnerability.class);
return result;
}
return null;
}
/**
     * Synchronizes a vulnerability. The method first checks to see if the vulnerability already
* exists and if so, updates the vulnerability. If the vulnerability does not already exist,
* this method will create a new vulnerability.
* @param vulnerability the vulnerability to synchronize
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @return a Vulnerability object
*/
public Vulnerability synchronizeVulnerability(Vulnerability vulnerability, boolean commitIndex) {
Vulnerability result = updateVulnerability(vulnerability, commitIndex);
if (result == null) {
result = createVulnerability(vulnerability, commitIndex);
}
return result;
}
/**
     * Returns a vulnerability by its name (e.g. CVE-2017-0001).
* @param vulnId the name of the vulnerability
* @return the matching Vulnerability object, or null if not found
*/
@SuppressWarnings("unchecked")
public Vulnerability getVulnerabilityByVulnId(String vulnId) {
final Query query = pm.newQuery(Vulnerability.class, "vulnId == :vulnId");
query.getFetchPlan().addGroup(Vulnerability.FetchGroup.COMPONENTS.name());
final List<Vulnerability> result = (List<Vulnerability>) query.execute(vulnId);
return result.size() == 0 ? null : result.get(0);
}
/**
     * Returns a vulnerability by its name (e.g. CVE-2017-0001) and source.
* @param source the source of the vulnerability
* @param vulnId the name of the vulnerability
* @return the matching Vulnerability object, or null if not found
*/
@SuppressWarnings("unchecked")
public Vulnerability getVulnerabilityByVulnId(String source, String vulnId) {
final Query query = pm.newQuery(Vulnerability.class, "source == :source && vulnId == :vulnId");
query.getFetchPlan().addGroup(Vulnerability.FetchGroup.COMPONENTS.name());
final List<Vulnerability> result = (List<Vulnerability>) query.execute(source, vulnId);
return result.size() == 0 ? null : result.get(0);
}
/**
* Adds a vulnerability to a component.
     * @param vulnerability the vulnerability to add
     * @param component the component affected by the vulnerability
*/
@SuppressWarnings("unchecked")
public void addVulnerability(Vulnerability vulnerability, Component component) {
vulnerability = getObjectById(Vulnerability.class, vulnerability.getId());
component = getObjectById(Component.class, component.getId());
if (!contains(vulnerability, component)) {
pm.currentTransaction().begin();
component.addVulnerability(vulnerability);
pm.currentTransaction().commit();
}
}
/**
* Removes a vulnerability from a component.
     * @param vulnerability the vulnerability to remove
     * @param component the component unaffected by the vulnerability
*/
@SuppressWarnings("unchecked")
public void removeVulnerability(Vulnerability vulnerability, Component component) {
vulnerability = getObjectById(Vulnerability.class, vulnerability.getId());
component = getObjectById(Component.class, component.getId());
if (contains(vulnerability, component)) {
pm.currentTransaction().begin();
component.removeVulnerability(vulnerability);
pm.currentTransaction().commit();
}
}
/**
* Determines if a Component is affected by a specific Vulnerability by checking
* {@link Vulnerability#getSource()} and {@link Vulnerability#getVulnId()}.
* @param vulnerability The vulnerability to check if associated with component
* @param component The component to check against
* @return true if vulnerability is associated with the component, false if not
*/
public boolean contains(Vulnerability vulnerability, Component component) {
vulnerability = getObjectById(Vulnerability.class, vulnerability.getId());
component = getObjectById(Component.class, component.getId());
for (Vulnerability vuln: component.getVulnerabilities()) {
if (vuln.getSource() != null && vuln.getSource().equals(vulnerability.getSource())
&& vuln.getVulnId() != null && vuln.getVulnId().equals(vulnerability.getVulnId())) {
return true;
}
}
return false;
}
/**
* Checks if the specified CWE id exists or not. If not, creates
* a new CWE with the specified ID and name. In both cases, the
* CWE will be returned.
* @param id the CWE ID
* @param name the name of the CWE
* @return a CWE object
*/
public Cwe createCweIfNotExist(int id, String name) {
Cwe cwe = getCweById(id);
if (cwe != null) {
return cwe;
}
cwe = new Cwe();
cwe.setCweId(id);
cwe.setName(name);
return persist(cwe);
}
/**
     * Returns a CWE by its CWE-ID.
* @param cweId the CWE-ID
* @return a CWE object, or null if not found
*/
@SuppressWarnings("unchecked")
public Cwe getCweById(int cweId) {
final Query query = pm.newQuery(Cwe.class, "cweId == :cweId");
final List<Cwe> result = (List<Cwe>) query.execute(cweId);
return result.size() == 0 ? null : result.get(0);
}
/**
* Returns a complete list of all CWE's.
* @return a List of CWEs
*/
@SuppressWarnings("unchecked")
public PaginatedResult getCwes() {
final Query query = pm.newQuery(Cwe.class);
query.setOrdering("id asc");
if (filter != null) {
query.setFilter("cweId == :cweId || name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filter, filterString);
}
return execute(query);
}
/**
* Checks if the specified Dependency exists or not. If not, creates
* a new Dependency with the specified project and component. In both
* cases, the Dependency will be returned.
* @param project the Project
* @param component the Component
* @param addedBy optional string representation of a username
* @param notes any notes on why the dependency exists or its usage
* @return a Dependency object
*/
public Dependency createDependencyIfNotExist(Project project, Component component, String addedBy, String notes) {
Dependency dependency = getDependency(project, component);
if (dependency != null) {
return dependency;
}
dependency = new Dependency();
dependency.setProject(project);
dependency.setComponent(component);
dependency.setAddedBy(addedBy);
dependency.setAddedOn(new Date());
dependency.setNotes(notes);
return persist(dependency);
}
/**
* Checks if the specified Dependency exists or not. If so, removes
* the component as a dependency of the project.
* @param project the Project
* @param component the Component
*/
public void removeDependencyIfExist(Project project, Component component) {
Dependency dependency = getDependency(project, component);
if (dependency != null) {
delete(dependency);
}
}
/**
* Returns a List of all Dependency for the specified Project.
     * This method is designed NOT to provide paginated results.
* @param project the Project to retrieve dependencies of
* @return a List of Dependency objects
*/
@SuppressWarnings("unchecked")
public List<Dependency> getAllDependencies(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
query.getFetchPlan().addGroup(Dependency.FetchGroup.COMPONENT_ONLY.name());
query.setOrdering("component.name asc");
return (List<Dependency>)query.execute(project);
}
/**
* Returns a List of Dependency for the specified Project.
* @param project the Project to retrieve dependencies of
* @return a List of Dependency objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getDependencies(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
query.getFetchPlan().addGroup(Dependency.FetchGroup.COMPONENT_ONLY.name());
query.setOrdering("component.name asc");
if (filter != null) {
query.setFilter("component.name.toLowerCase().matches(:name)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query, project);
}
/**
* Returns a List of Dependency for the specified Component.
* @param component the Component to retrieve dependencies of
* @return a List of Dependency objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getDependencies(Component component) {
final Query query = pm.newQuery(Dependency.class, "component == :component");
query.getFetchPlan().addGroup(Dependency.FetchGroup.PROJECT_ONLY.name());
return execute(query, component);
}
/**
* Deletes all dependencies for the specified Project.
* @param project the Project to delete dependencies of
*/
@SuppressWarnings("unchecked")
public void deleteDependencies(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
query.getFetchPlan().addGroup(Dependency.FetchGroup.PROJECT_ONLY.name());
query.deletePersistentAll(project);
}
/**
* Deletes all dependencies for the specified Component.
* @param component the Component to delete dependencies of
*/
@SuppressWarnings("unchecked")
public void deleteDependencies(Component component) {
final Query query = pm.newQuery(Dependency.class, "component == :component");
query.getFetchPlan().addGroup(Dependency.FetchGroup.COMPONENT_ONLY.name());
query.deletePersistentAll(component);
}
/**
* Returns the number of Dependency objects for the specified Project.
* @param project the Project to retrieve dependencies of
* @return the total number of dependencies for the project
*/
@SuppressWarnings("unchecked")
public long getDependencyCount(Project project) {
final Query query = pm.newQuery(Dependency.class, "project == :project");
return getCount(query, project);
}
/**
* Returns the number of Dependency objects for the specified Component.
* @param component the Component to retrieve dependencies of
* @return the total number of dependencies for the component
*/
@SuppressWarnings("unchecked")
public long getDependencyCount(Component component) {
final Query query = pm.newQuery(Dependency.class, "component == :component");
return getCount(query, component);
}
/**
* Returns a Dependency for the specified Project and Component.
* @param project the Project the component is part of
* @param component the Component
* @return a Dependency object, or null if not found
*/
@SuppressWarnings("unchecked")
public Dependency getDependency(Project project, Component component) {
final Query query = pm.newQuery(Dependency.class, "project == :project && component == :component");
query.getFetchPlan().addGroup(Dependency.FetchGroup.ALL.name());
final List<Dependency> result = (List<Dependency>) query.execute(project, component);
return result.size() == 0 ? null : result.get(0);
}
/**
     * Returns the total number of Vulnerability objects.
     * @return the total number of vulnerabilities
*/
@SuppressWarnings("unchecked")
public long getVulnerabilityCount() {
final Query query = pm.newQuery(Vulnerability.class);
return getCount(query);
}
/**
* Returns a List of all Vulnerabilities.
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getVulnerabilities() {
final Query query = pm.newQuery(Vulnerability.class);
if (filter != null) {
query.setFilter("vulnId.toLowerCase().matches(:vulnId)");
final String filterString = ".*" + filter.toLowerCase() + ".*";
return execute(query, filterString);
}
return execute(query);
}
/**
* Returns the number of Vulnerability objects for the specified Component.
* @param component the Component to retrieve vulnerabilities of
* @return the total number of vulnerabilities for the component
*/
@SuppressWarnings("unchecked")
public long getVulnerabilityCount(Component component) {
final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
return getCount(query, component);
}
/**
* Returns a List of Vulnerability for the specified Component.
* @param component the Component to retrieve vulnerabilities of
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public PaginatedResult getVulnerabilities(Component component) {
final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
return execute(query, component);
}
/**
* Returns a List of Vulnerability for the specified Component.
     * This method is designed NOT to provide paginated results.
* @param component the Component to retrieve vulnerabilities of
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public List<Vulnerability> getAllVulnerabilities(Component component) {
final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
return (List<Vulnerability>)query.execute(component);
}
/**
* Returns the number of Vulnerability objects for the specified Project.
* @param project the Project to retrieve vulnerabilities of
* @return the total number of vulnerabilities for the project
*/
@SuppressWarnings("unchecked")
public long getVulnerabilityCount(Project project) {
long total = 0;
final List<Dependency> dependencies = getDependencies(project).getList(Dependency.class);
for (Dependency dependency: dependencies) {
total += getVulnerabilityCount(dependency.getComponent());
}
return total;
}
/**
* Returns a List of Vulnerability for the specified Project.
* This method is unique and used by third-party integrations
* such as ThreadFix for the retrieval of vulnerabilities from
* a specific project along with the affected component(s).
* @param project the Project to retrieve vulnerabilities of
* @return a List of Vulnerability objects
*/
@SuppressWarnings("unchecked")
public List<Vulnerability> getVulnerabilities(Project project) {
final List<Vulnerability> vulnerabilities = new ArrayList<>();
final List<Dependency> dependencies = getAllDependencies(project);
for (Dependency dependency: dependencies) {
final Collection<Vulnerability> componentVulns = pm.detachCopyAll(
getAllVulnerabilities(dependency.getComponent())
);
for (Vulnerability componentVuln: componentVulns) {
componentVuln.setComponents(Arrays.asList(pm.detachCopy(dependency.getComponent())));
}
vulnerabilities.addAll(componentVulns);
}
return vulnerabilities;
}
/**
* Returns a List of Projects affected by a specific vulnerability.
* @param vulnerability the vulnerability to query on
* @return a List of Projects
*/
@SuppressWarnings("unchecked")
public List<Project> getProjects(Vulnerability vulnerability) {
final List<Project> projects = new ArrayList<>();
for (Component component: vulnerability.getComponents()) {
for (Dependency dependency: getDependencies(component).getList(Dependency.class)) {
projects.add(dependency.getProject());
}
}
return projects;
}
/**
     * Retrieves the current VulnerabilityMetrics.
     * @return a List of VulnerabilityMetrics objects
*/
@SuppressWarnings("unchecked")
public List<VulnerabilityMetrics> getVulnerabilityMetrics() {
final Query query = pm.newQuery(VulnerabilityMetrics.class);
query.setOrdering("year asc, month asc");
return execute(query).getList(VulnerabilityMetrics.class);
}
/**
* Retrieves the most recent PortfolioMetrics.
* @return a PortfolioMetrics object
*/
@SuppressWarnings("unchecked")
public PortfolioMetrics getMostRecentPortfolioMetrics() {
final Query query = pm.newQuery(PortfolioMetrics.class);
query.setOrdering("lastOccurrence desc");
final List<PortfolioMetrics> result = execute(query).getList(PortfolioMetrics.class);
return result.size() == 0 ? null : result.get(0);
}
/**
* Retrieves PortfolioMetrics in descending order starting with the most recent.
* @return a PaginatedResult object
*/
@SuppressWarnings("unchecked")
public PaginatedResult getPortfolioMetrics() {
final Query query = pm.newQuery(PortfolioMetrics.class);
query.setOrdering("lastOccurrence desc");
return execute(query);
}
/**
* Retrieves PortfolioMetrics in ascending order starting with the oldest since the date specified.
* @return a List of metrics
*/
@SuppressWarnings("unchecked")
public List<PortfolioMetrics> getPortfolioMetricsSince(Date since) {
final Query query = pm.newQuery(PortfolioMetrics.class, "lastOccurrence >= :since");
query.setOrdering("lastOccurrence asc");
return (List<PortfolioMetrics>)query.execute(since);
}
/**
* Retrieves the most recent ProjectMetrics.
* @param project the Project to retrieve metrics for
* @return a ProjectMetrics object
*/
@SuppressWarnings("unchecked")
public ProjectMetrics getMostRecentProjectMetrics(Project project) {
final Query query = pm.newQuery(ProjectMetrics.class, "project == :project");
query.setOrdering("lastOccurrence desc");
final List<ProjectMetrics> result = execute(query, project).getList(ProjectMetrics.class);
return result.size() == 0 ? null : result.get(0);
}
/**
* Retrieves ProjectMetrics in descending order starting with the most recent.
* @param project the Project to retrieve metrics for
* @return a PaginatedResult object
*/
@SuppressWarnings("unchecked")
public PaginatedResult getProjectMetrics(Project project) {
final Query query = pm.newQuery(ProjectMetrics.class, "project == :project");
query.setOrdering("lastOccurrence desc");
return execute(query, project);
}
/**
* Retrieves ProjectMetrics in ascending order starting with the oldest since the date specified.
* @return a List of metrics
*/
@SuppressWarnings("unchecked")
public List<ProjectMetrics> getProjectMetricsSince(Project project, Date since) {
final Query query = pm.newQuery(PortfolioMetrics.class, "project == :project && lastOccurrence >= :since");
query.setOrdering("lastOccurrence asc");
return (List<ProjectMetrics>)query.execute(project, since);
}
/**
* Retrieves the most recent ComponentMetrics.
* @param component the Component to retrieve metrics for
* @return a ComponentMetrics object
*/
@SuppressWarnings("unchecked")
public ComponentMetrics getMostRecentComponentMetrics(Component component) {
final Query query = pm.newQuery(ComponentMetrics.class, "component == :component");
query.setOrdering("lastOccurrence desc");
final List<ComponentMetrics> result = execute(query, component).getList(ComponentMetrics.class);
return result.size() == 0 ? null : result.get(0);
}
/**
* Retrieves ComponentMetrics in descending order starting with the most recent.
* @param component the Component to retrieve metrics for
* @return a PaginatedResult object
*/
@SuppressWarnings("unchecked")
public PaginatedResult getComponentMetrics(Component component) {
final Query query = pm.newQuery(ComponentMetrics.class, "component == :component");
query.setOrdering("lastOccurrence desc");
return execute(query, component);
}
/**
* Retrieves ComponentMetrics in ascending order starting with the oldest since the date specified.
* @return a List of metrics
*/
@SuppressWarnings("unchecked")
public List<ComponentMetrics> getComponentMetricsSince(Component component, Date since) {
final Query query = pm.newQuery(PortfolioMetrics.class, "component == :component && lastOccurrence >= :since");
query.setOrdering("lastOccurrence asc");
return (List<ComponentMetrics>)query.execute(component, since);
}
/**
* Synchronizes VulnerabilityMetrics.
*/
public void synchronizeVulnerabilityMetrics(VulnerabilityMetrics metric) {
final Query query;
final List<VulnerabilityMetrics> result;
if (metric.getMonth() == null) {
query = pm.newQuery(VulnerabilityMetrics.class, "year == :year && month == null");
result = execute(query, metric.getYear()).getList(VulnerabilityMetrics.class);
} else {
query = pm.newQuery(VulnerabilityMetrics.class, "year == :year && month == :month");
result = execute(query, metric.getYear(), metric.getMonth()).getList(VulnerabilityMetrics.class);
}
if (result.size() == 1) {
VulnerabilityMetrics m = result.get(0);
m.setCount(metric.getCount());
m.setMeasuredAt(metric.getMeasuredAt());
persist(m);
} else if (result.size() == 0) {
persist(metric);
} else {
delete(result);
persist(metric);
}
}
/**
     * Deletes all metrics associated with the specified Project.
* @param project the Project to delete metrics for
*/
public void deleteMetrics(Project project) {
final Query query = pm.newQuery(ProjectMetrics.class, "project == :project");
query.deletePersistentAll(project);
}
/**
     * Deletes all metrics associated with the specified Component.
* @param component the Component to delete metrics for
*/
public void deleteMetrics(Component component) {
final Query query = pm.newQuery(ComponentMetrics.class, "component == :component");
query.deletePersistentAll(component);
}
/**
* Binds the two objects together in a corresponding join table.
* @param project a Project object
* @param tags a List of Tag objects
*/
@SuppressWarnings("unchecked")
public void bind(Project project, List<Tag> tags) {
final Query query = pm.newQuery(Tag.class, "projects.contains(:project)");
List<Tag> currentProjectTags = (List<Tag>)query.execute(project);
pm.currentTransaction().begin();
for (Tag tag: currentProjectTags) {
if (!tags.contains(tag)) {
tag.getProjects().remove(project);
}
}
project.setTags(tags);
for (Tag tag: tags) {
tag.getProjects().add(project);
}
pm.currentTransaction().commit();
}
/**
* Binds the two objects together in a corresponding join table.
* @param scan a Scan object
* @param component a Component object
*/
public void bind(Scan scan, Component component) {
        boolean bound = scan.getComponents().stream().anyMatch(s -> s.getId() == component.getId());
if (!bound) {
pm.currentTransaction().begin();
scan.getComponents().add(component);
component.getScans().add(scan);
pm.currentTransaction().commit();
}
}
/**
* Binds the two objects together in a corresponding join table.
* @param bom a Bom object
* @param component a Component object
*/
public void bind(Bom bom, Component component) {
        boolean bound = bom.getComponents().stream().anyMatch(b -> b.getId() == component.getId());
if (!bound) {
pm.currentTransaction().begin();
bom.getComponents().add(component);
component.getBoms().add(bom);
pm.currentTransaction().commit();
}
}
/**
* Binds the two objects together in a corresponding join table.
* @param component a Component object
* @param vulnerability a Vulnerability object
*/
public void bind(Component component, Vulnerability vulnerability) {
boolean bound = vulnerability.getComponents().stream().anyMatch(c -> c.getId() == component.getId());
if (!bound) {
pm.currentTransaction().begin();
vulnerability.getComponents().add(component);
component.getVulnerabilities().add(vulnerability);
pm.currentTransaction().commit();
}
}
/**
     * Commits the Lucene index.
* @param commitIndex specifies if the search index should be committed (an expensive operation)
* @param clazz the indexable class to commit the index of
*/
public void commitSearchIndex(boolean commitIndex, Class clazz) {
if (commitIndex) {
SingleThreadedEventService.getInstance().publish(new IndexEvent(IndexEvent.Action.COMMIT, clazz));
}
}
/**
     * Commits the Lucene index.
* @param clazz the indexable class to commit the index of
*/
public void commitSearchIndex(Class clazz) {
commitSearchIndex(true, clazz);
}
}
| Fixed pagination issue in 3.0 master branch that resulted in inconsistent results when paginating through multiple sets of results. This resulted in some pages containing duplicates and omissions of some data altogether.
The issue reported against v1.0 may or may not have been the same issue. v3 is based on entirely different code, different persistence framework and logic, and a different data model.
It is interesting that v3 had similar symptoms.
Fix: ensure all paginated results have a default ordering set.
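As a rough sketch of the pattern now applied to every method that returns a PaginatedResult (reusing the pm, Query, and execute() helpers already present in QueryManager, shown here only on the portfolio-wide vulnerability query as an illustration):

    final Query query = pm.newQuery(Vulnerability.class);
    // Without an explicit ordering the datastore may return rows in a different
    // order on each page request, producing duplicate and missing entries.
    query.setOrdering("id asc");
    return execute(query);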
Closes #60
| src/main/java/org/owasp/dependencytrack/persistence/QueryManager.java | Fixed pagination issue in 3.0 master branch that resulted in inconsistent results when paginating through multiple sets of results. This resulted in some pages containing duplicates and omissions of some data altogether. | <ide><path>rc/main/java/org/owasp/dependencytrack/persistence/QueryManager.java
<ide> @SuppressWarnings("unchecked")
<ide> public PaginatedResult getDependencies(Component component) {
<ide> final Query query = pm.newQuery(Dependency.class, "component == :component");
<add> query.setOrdering("id asc");
<ide> query.getFetchPlan().addGroup(Dependency.FetchGroup.PROJECT_ONLY.name());
<ide> return execute(query, component);
<ide> }
<ide> @SuppressWarnings("unchecked")
<ide> public PaginatedResult getVulnerabilities() {
<ide> final Query query = pm.newQuery(Vulnerability.class);
<add> query.setOrdering("id asc");
<ide> if (filter != null) {
<ide> query.setFilter("vulnId.toLowerCase().matches(:vulnId)");
<ide> final String filterString = ".*" + filter.toLowerCase() + ".*";
<ide> @SuppressWarnings("unchecked")
<ide> public PaginatedResult getVulnerabilities(Component component) {
<ide> final Query query = pm.newQuery(Vulnerability.class, "components.contains(:component)");
<add> query.setOrdering("id asc");
<ide> return execute(query, component);
<ide> }
<ide> |
|
Java | mit | d334d135081489cc9b2d1b5c53f08af1dbb5a883 | 0 | Sqooba/traildb-java,Sqooba/traildb-java,Sqooba/traildb-java | package io.sqooba.traildb.test;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import io.sqooba.traildb.TrailDB;
import io.sqooba.traildb.TrailDB.TrailDBBuilder;
import io.sqooba.traildb.TrailDBException;
import io.sqooba.traildb.TrailDBNative;
import mockit.Expectations;
import uk.org.lidalia.slf4jtest.TestLogger;
import uk.org.lidalia.slf4jtest.TestLoggerFactory;
public class TrailDBFailureTest {
private final TrailDBNative traildb = TrailDBNative.INSTANCE;
private TrailDB db;
private final String path = "testdb";
private final String cookie = "12345678123456781234567812345678";
private final String otherCookie = "12121212121212121212121212121212";
private final TestLogger logger = TestLoggerFactory.getTestLogger(TrailDB.class);
@Rule
public ExpectedException expectedEx = ExpectedException.none();
@Before
public void setUp() throws IOException {
// Initialise a TrailDB with some TrailDBEvents.
final TrailDBBuilder builder = new TrailDBBuilder(this.path, new String[] { "field1", "field2" });
builder.add(this.cookie, 120, new String[] { "a", "hinata" });
builder.add(this.cookie, 121, new String[] { "vilya", "" });
builder.add(this.otherCookie, 122, new String[] { "kaguya", "hinata" });
builder.add(this.otherCookie, 123, new String[] { "alongstring", "averyveryverylongstring" });
this.db = builder.build();
}
@After
public void tearDown() throws IOException {
// Clear the TrailDB files/directories created for the tests.
final File f = new File(this.path + ".tdb");
if (f.exists() && !f.isDirectory()) {
f.delete();
}
FileUtils.deleteDirectory(new File(this.path));
TestLoggerFactory.clear();
}
@Test
public void initFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Failed to allocate memory to init a new TrailDB.");
final TrailDBNative traildbj = this.traildb;
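        // Partially mock the native binding with JMockit so that init() returns null and the failure path is exercised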
new Expectations(traildbj) {
{
traildbj.init();
this.result = null;
}
};
try (TrailDB db = new TrailDB(this.path)) {
// Auto close.
}
}
@Test
public void openFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Failed to open db.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.open((ByteBuffer)this.any, this.anyString);
this.result = -1;
}
};
try (TrailDB db = new TrailDB(this.path)) {
// Auto close.
}
}
@Test
public void minTimestampOverflow() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.minTimestamp((ByteBuffer)this.any);
this.result = -1;
}
};
this.db.getMinTimestamp();
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "long overflow, received a negtive value for min timestamp.".equals(e.getMessage())));
}
@Test
public void maxTimestampOverflow() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.maxTimestamp((ByteBuffer)this.any);
this.result = -1;
}
};
this.db.getMaxTimestamp();
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "long overflow, received a negative value for max timestamp.".equals(e.getMessage())));
}
@Test
public void versionOverflow() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.version((ByteBuffer)this.any);
this.result = -1;
}
};
this.db.getVersion();
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "version overflow.".equals(e.getMessage())));
}
@Test
public void getItemFailure() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.getItem((ByteBuffer)this.any, this.anyLong, this.anyString);
this.result = -1;
}
};
this.db.getItem(0, "bla");
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "Returned item overflow, deal with it carefully!".equals(e.getMessage())));
}
@Test
public void getUUIDFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Invalid trail ID.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.getUUID((ByteBuffer)this.any, this.anyLong);
this.result = null;
}
};
this.db.getUUID(1);
}
@Test
public void trailCursorNewFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Memory allocation failed for cursor.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.cursorNew((ByteBuffer)this.any);
this.result = null;
}
};
this.db.trail(0);
}
@Test
public void trailFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Failed to create cursor with code: -1");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.getTrail((ByteBuffer)this.any, this.anyLong);
this.result = -1;
}
};
this.db.trail(0);
}
}
| src/test/java/io/sqooba/traildb/test/TrailDBFailureTest.java | package io.sqooba.traildb.test;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import io.sqooba.traildb.TrailDB;
import io.sqooba.traildb.TrailDB.TrailDBBuilder;
import io.sqooba.traildb.TrailDBException;
import io.sqooba.traildb.TrailDBNative;
import mockit.Expectations;
import uk.org.lidalia.slf4jtest.TestLogger;
import uk.org.lidalia.slf4jtest.TestLoggerFactory;
public class TrailDBFailureTest {
private TrailDBNative traildb = TrailDBNative.INSTANCE;
private TrailDB db;
private String path = "testdb";
private String cookie = "12345678123456781234567812345678";
private String otherCookie = "12121212121212121212121212121212";
private final TestLogger logger = TestLoggerFactory.getTestLogger(TrailDB.class);
@Rule
public ExpectedException expectedEx = ExpectedException.none();
@Before
public void setUp() throws IOException {
// Initialise a TrailDB with some TrailDBEvents.
TrailDBBuilder builder = new TrailDBBuilder(this.path, new String[] { "field1", "field2" });
builder.add(this.cookie, 120, new String[] { "a", "hinata" });
builder.add(this.cookie, 121, new String[] { "vilya", "" });
builder.add(this.otherCookie, 122, new String[] { "kaguya", "hinata" });
builder.add(this.otherCookie, 123, new String[] { "alongstring", "averyveryverylongstring" });
this.db = builder.build();
}
@After
public void tearDown() throws IOException {
// Clear the TrailDB files/directories created for the tests.
File f = new File(this.path + ".tdb");
if (f.exists() && !f.isDirectory()) {
f.delete();
}
FileUtils.deleteDirectory(new File(this.path));
TestLoggerFactory.clear();
}
@Test
public void initFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Failed to allocate memory to init a new TrailDB.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.init();
this.result = null;
}
};
try (TrailDB db = new TrailDB(this.path)) {
// Auto close.
}
}
@Test
public void openFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Failed to open db.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.open((ByteBuffer)this.any, this.anyString);
this.result = -1;
}
};
try (TrailDB db = new TrailDB(this.path)) {
// Auto close.
}
}
@Test
public void minTimestampOverflow() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.minTimestamp((ByteBuffer)this.any);
this.result = -1;
}
};
this.db.getMinTimestamp();
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "long overflow, received a negtive value for min timestamp.".equals(e.getMessage())));
}
@Test
public void maxTimestampOverflow() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.maxTimestamp((ByteBuffer)this.any);
this.result = -1;
}
};
this.db.getMaxTimestamp();
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "long overflow, received a negtive value for max timestamp.".equals(e.getMessage())));
}
@Test
public void versionOverflow() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.version((ByteBuffer)this.any);
this.result = -1;
}
};
this.db.getVersion();
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "version overflow.".equals(e.getMessage())));
}
@Test
public void getItemFailure() throws IOException {
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.getItem((ByteBuffer)this.any, this.anyLong, this.anyString);
this.result = -1;
}
};
this.db.getItem(0, "bla");
assertTrue(this.logger.getLoggingEvents().stream()
.anyMatch(e -> "Returned item overflow, deal with it carefully!".equals(e.getMessage())));
}
@Test
public void getUUIDFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Invalid trail ID.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.getUUID((ByteBuffer)this.any, this.anyLong);
this.result = null;
}
};
this.db.getUUID(1);
}
@Test
public void trailCursorNewFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Memory allocation failed for cursor.");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.cursorNew((ByteBuffer)this.any);
this.result = null;
}
};
this.db.trail(0);
}
@Test
public void trailFailure() {
this.expectedEx.expect(TrailDBException.class);
this.expectedEx.expectMessage("Failed to create cursor with code: -1");
final TrailDBNative traildbj = this.traildb;
new Expectations(traildbj) {
{
traildbj.getTrail((ByteBuffer)this.any, this.anyLong);
this.result = -1;
}
};
this.db.trail(0);
}
}
 | Fix test.
 | src/test/java/io/sqooba/traildb/test/TrailDBFailureTest.java | Fix test. | <ide><path>rc/test/java/io/sqooba/traildb/test/TrailDBFailureTest.java
<ide>
<ide> public class TrailDBFailureTest {
<ide>
<del> private TrailDBNative traildb = TrailDBNative.INSTANCE;
<add> private final TrailDBNative traildb = TrailDBNative.INSTANCE;
<ide>
<ide> private TrailDB db;
<del> private String path = "testdb";
<del> private String cookie = "12345678123456781234567812345678";
<del> private String otherCookie = "12121212121212121212121212121212";
<add> private final String path = "testdb";
<add> private final String cookie = "12345678123456781234567812345678";
<add> private final String otherCookie = "12121212121212121212121212121212";
<ide>
<ide> private final TestLogger logger = TestLoggerFactory.getTestLogger(TrailDB.class);
<ide>
<ide> public void setUp() throws IOException {
<ide>
<ide> // Initialise a TrailDB with some TrailDBEvents.
<del> TrailDBBuilder builder = new TrailDBBuilder(this.path, new String[] { "field1", "field2" });
<add> final TrailDBBuilder builder = new TrailDBBuilder(this.path, new String[] { "field1", "field2" });
<ide> builder.add(this.cookie, 120, new String[] { "a", "hinata" });
<ide> builder.add(this.cookie, 121, new String[] { "vilya", "" });
<ide> builder.add(this.otherCookie, 122, new String[] { "kaguya", "hinata" });
<ide> public void tearDown() throws IOException {
<ide>
<ide> // Clear the TrailDB files/directories created for the tests.
<del> File f = new File(this.path + ".tdb");
<add> final File f = new File(this.path + ".tdb");
<ide> if (f.exists() && !f.isDirectory()) {
<ide> f.delete();
<ide> }
<ide> };
<ide> this.db.getMaxTimestamp();
<ide> assertTrue(this.logger.getLoggingEvents().stream()
<del> .anyMatch(e -> "long overflow, received a negtive value for max timestamp.".equals(e.getMessage())));
<add> .anyMatch(e -> "long overflow, received a negative value for max timestamp.".equals(e.getMessage())));
<ide> }
<ide>
<ide> @Test |
|
Java | mit | 556a2f8a3d064c7d8fb9719f2858bea9ed2c9075 | 0 | lucasdavid/Compilers-2-assignment-2,lucasdavid/Compilers-2-assignment-2,lucasdavid/Compilers-2-assignment-2,lucasdavid/Compilers-2-assignment-2,lucasdavid/Compilers-2-assignment-2 | package compiler;
import filehandler.JsonWriter;
import grammar.ReceiptLexer;
import grammar.ReceiptParser;
import infrastructure.SemanticListener;
import infrastructure.SyntacticalListener;
import infrastructure.exceptions.*;
import infrastructure.json.JsonStructure;
import infrastructure.messagebag.MessageBag;
import infrastructure.translator.Translator;
import java.io.File;
import java.io.FileInputStream;
import java.util.LinkedList;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
/**
*
* @author Lucas
*/
public class Compiler {
private static LinkedList<String> filesToCompile;
private String in;
private String out;
private static int Id;
Compiler(String in) {
try {
if (in.isEmpty()) {
throw new IllegalArgumentException("Parameter in cannot be an empty String");
}
this.in = in;
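            // Derive the output path: swap the "input" segment for "output", keep any nested sub-directory, and number the generated .json file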
String parent = new File(in).getParentFile().getName();
out = in.substring(0, in.indexOf("input")) + "output\\";
if (!parent.equals("input")) {
out += parent + "\\";
}
out += (++Id) + ".json";
} catch (NullPointerException e) {
throw new IllegalArgumentException(e.getMessage());
}
}
Compiler(String in, String out) {
this(in);
this.out = out;
}
/**
* @param args the command line arguments
* @throws java.lang.Exception
*/
public static void main(String[] args) throws Exception {
filesToCompile = new LinkedList<>();
if (args.length > 0) {
File file = new File(args[0]);
if (file.isDirectory()) {
for (File child : file.listFiles()) {
filesToCompile.add(child.getPath());
}
} else {
filesToCompile.add(file.getPath());
}
} else {
filesToCompile.add("src/input/test.txt");
}
for (String file : filesToCompile) {
try {
new Compiler(file).start();
} catch (IllegalArgumentException e) {
System.err.println("The input file name is invalid: \"" + file + "\"");
} catch (RecognitionException e) {
System.err.println("Recognition error on file \"" + file + "\": "
+ e.getMessage());
} catch (LexicalException | SyntacticalException | SemanticException e) {
System.err.println("The compilation of this specific file cannot continue.");
} catch (TranslationException e) {
System.err.println("Translation error on file \"" + file + "\": "
+ e.getMessage());
} catch (JsonExportException e) {
System.err.println("Json exportation error on file \"" + file + "\": "
+ e.getMessage());
} catch (Exception e) {
System.err.println("An error has occured while compiling file \"" + file +
(e.getMessage() != null ? "\": " + e.getMessage() : ""));
}
}
}
private void start() throws Exception {
System.out.println("\nInput file: " + in);
System.out.print("Parsing has started... ");
ANTLRInputStream inputStream = new ANTLRInputStream(new FileInputStream(in));
// instantiating error bag and listeners
MessageBag lexicalBag = new MessageBag();
MessageBag syntacticalBag = new MessageBag();
MessageBag semanticBag = new MessageBag();
SyntacticalListener lexics = new SyntacticalListener(lexicalBag);
SyntacticalListener syntactics = new SyntacticalListener(syntacticalBag);
// injecting listeners
ReceiptLexer lexer = new ReceiptLexer(inputStream);
lexer.addErrorListener(lexics);
ReceiptParser parser = new ReceiptParser(new CommonTokenStream(lexer));
parser.addErrorListener(syntactics);
// parse input file and retrieve its JsonStructure
JsonStructure tree = parser.receipt().e;
        // stops compilation process if errors were found
if (lexics.hasErrors()) {
throw new LexicalException();
}
if (syntactics.hasErrors()) {
throw new SyntacticalException();
}
System.out.println("Done!");
SemanticListener semantics = new SemanticListener(tree, semanticBag);
if(semantics.hasErrors()) {
System.err.println("Semantic errors were found:");
for (String error : semantics.errors()) {
System.err.println(error);
}
throw new SemanticException();
}
System.out.print("Translation has started... ");
// print JsonStructure to the String result
String result = new Translator(tree)
.run()
.export();
System.out.println("Done!");
System.out.print("Exporting .json file... ");
// export file with printed JsonStructure
new JsonWriter(out, result)
.export();
System.out.println("Done!");
}
}
| Compiler/src/compiler/Compiler.java | package compiler;
import filehandler.JsonWriter;
import grammar.ReceiptLexer;
import grammar.ReceiptParser;
import infrastructure.SemanticListener;
import infrastructure.SyntacticalListener;
import infrastructure.exceptions.*;
import infrastructure.json.JsonStructure;
import infrastructure.messagebag.MessageBag;
import infrastructure.translator.Translator;
import java.io.File;
import java.io.FileInputStream;
import java.util.LinkedList;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
/**
*
* @author Lucas
*/
public class Compiler {
private static LinkedList<String> filesToCompile;
private String in;
private String out;
private static int Id;
Compiler(String in) {
try {
if (in.isEmpty()) {
throw new IllegalArgumentException("Parameter in cannot be an empty String");
}
this.in = in;
String parent = new File(in).getParentFile().getName();
out = in.substring(0, in.indexOf("input")) + "output\\";
if (!parent.equals("input")) {
out += parent + "\\";
}
out += (++Id) + ".json";
} catch (NullPointerException e) {
throw new IllegalArgumentException(e.getMessage());
}
}
Compiler(String in, String out) {
this(in);
this.out = out;
}
/**
* @param args the command line arguments
* @throws java.lang.Exception
*/
public static void main(String[] args) throws Exception {
filesToCompile = new LinkedList<>();
if (args.length > 0) {
File file = new File(args[0]);
if (file.isDirectory()) {
for (File child : file.listFiles()) {
filesToCompile.add(child.getPath());
}
} else {
filesToCompile.add(file.getPath());
}
} else {
filesToCompile.add("src/input/test.txt");
}
for (String file : filesToCompile) {
try {
new Compiler(file).start();
} catch (IllegalArgumentException e) {
System.err.println("The input file name is invalid: \"" + file + "\"");
} catch (RecognitionException e) {
System.err.println("Recognition error on file \"" + file + "\": "
+ e.getMessage());
} catch (LexicalException | SyntacticalException | SemanticException e) {
System.err.println("The compilation of this specific file cannot continue.");
} catch (TranslationException e) {
System.err.println("Translation error on file \"" + file + "\": "
+ e.getMessage());
} catch (JsonExportException e) {
System.err.println("Json exportation error on file \"" + file + "\": "
+ e.getMessage());
} catch (Exception e) {
System.err.println("An error has occured while compiling file \"" + file +
(e.getMessage() != null ? "\": " + e.getMessage() : ""));
}
}
}
private void start() throws Exception {
System.out.println("\nInput file: " + in);
System.out.print("Parsing has started... ");
ANTLRInputStream inputStream = new ANTLRInputStream(new FileInputStream(in));
// instantiating error bag and listeners
MessageBag lexicalBag = new MessageBag();
MessageBag syntacticalBag = new MessageBag();
MessageBag semanticBag = new MessageBag();
SyntacticalListener lexics = new SyntacticalListener(lexicalBag);
SyntacticalListener syntactics = new SyntacticalListener(syntacticalBag);
// injecting listeners
ReceiptLexer lexer = new ReceiptLexer(inputStream);
lexer.addErrorListener(lexics);
ReceiptParser parser = new ReceiptParser(new CommonTokenStream(lexer));
parser.addErrorListener(syntactics);
// parse input file and retrieve its JsonStructure
JsonStructure tree = parser.receipt().e;
        // stops compilation process if errors were found
if (lexics.hasErrors()) {
throw new LexicalException();
}
if (lexics.hasErrors()) {
throw new SyntacticalException();
}
System.out.println("Done!");
SemanticListener semantics = new SemanticListener(tree, semanticBag);
if(semantics.hasErrors()) {
System.err.println("Semantic errors were found:");
for (String error : semantics.errors()) {
System.err.println(error);
}
throw new SemanticException();
}
System.out.print("Translation has started... ");
// print JsonStructure to the String result
String result = new Translator(tree)
.run()
.export();
System.out.println("Done!");
System.out.print("Exporting .json file... ");
// export file with printed JsonStructure
new JsonWriter(out, result)
.export();
System.out.println("Done!");
}
}
| Fix lexics.hasErrors line
| Compiler/src/compiler/Compiler.java | Fix lexics.hasErrors line | <ide><path>ompiler/src/compiler/Compiler.java
<ide> throw new LexicalException();
<ide> }
<ide>
<del> if (lexics.hasErrors()) {
<add> if (syntactics.hasErrors()) {
<ide> throw new SyntacticalException();
<ide> }
<ide> |
|
Java | mit | 7e4275146ccbaf9f6e3cd16d8a99fdf8a2ef715a | 0 | nls-oskari/oskari-server,nls-oskari/oskari-server,nls-oskari/oskari-server | package org.oskari.statistics.plugins.unsd;
import fi.nls.oskari.control.statistics.data.IndicatorValue;
import fi.nls.oskari.control.statistics.data.StatisticalIndicator;
import fi.nls.oskari.control.statistics.data.StatisticalIndicatorDataModel;
import fi.nls.oskari.control.statistics.data.StatisticalIndicatorLayer;
import fi.nls.oskari.control.statistics.plugins.StatisticalDatasourcePlugin;
import fi.nls.oskari.control.statistics.plugins.db.DatasourceLayer;
import fi.nls.oskari.control.statistics.plugins.db.StatisticalDatasource;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.util.JSONHelper;
import fi.nls.oskari.util.PropertyUtil;
import org.json.JSONObject;
import java.util.*;
import java.util.stream.Collectors;
public class UnsdStatisticalDatasourcePlugin extends StatisticalDatasourcePlugin {
private final static Logger LOG = LogFactory.getLogger(UnsdStatisticalDatasourcePlugin.class);
private UnsdConfig config;
private UnsdDataParser indicatorValuesFetcher;
private RegionMapper regionMapper;
/**
* Maps the UNSD area codes to Oskari layers.
*/
private Map<Long, String[]> layerAreaCodes = new HashMap<>();
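    // Keyed by Oskari map layer id; the value is the array of M49 area codes to request for that layer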
@Override
public void init(StatisticalDatasource source) {
super.init(source);
config = new UnsdConfig(source.getConfigJSON(), source.getId());
indicatorValuesFetcher = new UnsdDataParser(config);
regionMapper = new RegionMapper();
// optimization for getting data just for the countries we are showing
initAreaCodes(source.getLayers());
}
@Override
public void update() {
// get the indicator listing
UnsdRequest request = new UnsdRequest(config);
request.setGoal(config.getGoal());
String targetsResponse = request.getTargets();
List<StatisticalIndicator> indicators = UnsdIndicatorParser.parseIndicators(targetsResponse);
indicators = resolveDimensionsForAllIndicatorsOfGoal(indicators);
// And write to cache serially to preserve sorted order
indicators.forEach(ind -> onIndicatorProcessed(ind));
LOG.info("Indicators handled.");
}
private List<StatisticalIndicator> resolveDimensionsForAllIndicatorsOfGoal(List<StatisticalIndicator> indicators) {
// Resolve dimensions parallel for indicators
return indicators.parallelStream().map(ind -> resolveDimensionsForSingleIndicator(ind))
.sorted((ind1, ind2) -> ind1.getId().compareTo(ind2.getId())).collect(Collectors.toList());
}
@Override
public Map<String, IndicatorValue> getIndicatorValues(StatisticalIndicator indicator,
StatisticalIndicatorDataModel params, StatisticalIndicatorLayer regionset) {
String[] areaCodes = layerAreaCodes.get(regionset.getOskariLayerId());
// map m49 codes back to region ids (iso2 etc) before returning
Map<String, IndicatorValue> values = indicatorValuesFetcher.get(params, indicator.getId(), areaCodes);
List<CountryRegion> regions = values.keySet().stream().map(m49 -> regionMapper.find(m49))
.filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList());
Map<String, IndicatorValue> updated = new HashMap<>();
regions.stream().forEach(c -> {
IndicatorValue value = values.get(c.getCode(CountryRegion.Type.M49_WO_LEADING));
// TODO: check if the region code from layer is iso2 or iso3 or m49
// Now always assumes iso2
updated.put(c.getCode(CountryRegion.Type.ISO2), value);
});
return updated;
}
private void initAreaCodes(List<DatasourceLayer> layers) {
// TODO; Get codes from RegionSetHelper?
String[] regionWhitelist = PropertyUtil.getCommaSeparatedList("unsd.region.whitelist");
if (regionWhitelist.length == 0) {
// no whitelist -> get data for all regions
return;
}
List<String> countries = Arrays.stream(regionWhitelist).map(code -> regionMapper.find(code))
.filter(Optional::isPresent).map(Optional::get).map(c -> c.getCode(CountryRegion.Type.M49))
.collect(Collectors.toList());
for (DatasourceLayer layer : layers) {
layerAreaCodes.put(layer.getMaplayerId(), countries.toArray(new String[0]));
}
}
private StatisticalIndicator resolveDimensionsForSingleIndicator(StatisticalIndicator ind) {
UnsdRequest request = new UnsdRequest(config);
request.setGoal(config.getGoal());
request.setIndicator(ind.getId());
JSONObject dataResponse = JSONHelper.createJSONObject(request.getIndicatorData(null));
ind.setSource(UnsdIndicatorParser.parseSource(dataResponse));
// Parse indicator specific dimensions from indicator data response
ind.setDataModel(UnsdIndicatorParser.parseDimensions(dataResponse));
// Parse time period from indicator data responses(from all pages)
ind.getDataModel().addDimension(indicatorValuesFetcher
.getTimeperiodDimensionFromIndicatorData(config.getTimeVariableId(), ind.getId()));
ind.getDataModel().setTimeVariable(config.getTimeVariableId());
getSource().getLayers().stream().forEach(l -> ind.addLayer(l));
return ind;
}
}
| service-statistics-unsd/src/main/java/org/oskari/statistics/plugins/unsd/UnsdStatisticalDatasourcePlugin.java | package org.oskari.statistics.plugins.unsd;
import fi.nls.oskari.control.statistics.data.IndicatorValue;
import fi.nls.oskari.control.statistics.data.StatisticalIndicator;
import fi.nls.oskari.control.statistics.data.StatisticalIndicatorDataModel;
import fi.nls.oskari.control.statistics.data.StatisticalIndicatorLayer;
import fi.nls.oskari.control.statistics.plugins.StatisticalDatasourcePlugin;
import fi.nls.oskari.control.statistics.plugins.db.DatasourceLayer;
import fi.nls.oskari.control.statistics.plugins.db.StatisticalDatasource;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.util.JSONHelper;
import fi.nls.oskari.util.PropertyUtil;
import org.json.JSONObject;
import java.util.*;
import java.util.stream.Collectors;
public class UnsdStatisticalDatasourcePlugin extends StatisticalDatasourcePlugin {
private final static Logger LOG = LogFactory.getLogger(UnsdStatisticalDatasourcePlugin.class);
private UnsdConfig config;
private UnsdDataParser indicatorValuesFetcher;
private RegionMapper regionMapper;
/**
* Maps the UNSD area codes to Oskari layers.
*/
private Map<Long, String[]> layerAreaCodes = new HashMap<>();
@Override
public void update() {
// get the indicator listing
UnsdRequest request = new UnsdRequest(config);
request.setGoal(config.getGoal());
String targetsResponse = request.getTargets();
List<StatisticalIndicator> indicators = UnsdIndicatorParser.parseIndicators(targetsResponse);
// Resolve indicator dimensions parallel
indicators = indicators.parallelStream().map(ind -> resolveIndicatorDimensions(ind))
.sorted((ind1, ind2) -> ind1.getId().compareTo(ind2.getId())).collect(Collectors.toList());
// And write to cache serially to preserve sorted order
indicators.forEach(ind -> onIndicatorProcessed(ind));
LOG.info("Indicators handled.");
}
private StatisticalIndicator resolveIndicatorDimensions(StatisticalIndicator ind) {
UnsdRequest request = new UnsdRequest(config);
request.setGoal(config.getGoal());
request.setIndicator(ind.getId());
JSONObject dataResponse = JSONHelper.createJSONObject(request.getIndicatorData(null));
ind.setSource(UnsdIndicatorParser.parseSource(dataResponse));
// Parse indicator specific dimensions from indicator data response
ind.setDataModel(UnsdIndicatorParser.parseDimensions(dataResponse));
// Parse time period from indicator data responses(from all pages)
ind.getDataModel().addDimension(indicatorValuesFetcher
.getTimeperiodDimensionFromIndicatorData(config.getTimeVariableId(), ind.getId()));
ind.getDataModel().setTimeVariable(config.getTimeVariableId());
getSource().getLayers().stream().forEach(l -> ind.addLayer(l));
return ind;
}
@Override
public void init(StatisticalDatasource source) {
super.init(source);
config = new UnsdConfig(source.getConfigJSON(), source.getId());
indicatorValuesFetcher = new UnsdDataParser(config);
regionMapper = new RegionMapper();
// optimization for getting data just for the countries we are showing
initAreaCodes(source.getLayers());
}
private void initAreaCodes(List<DatasourceLayer> layers) {
// TODO; Get codes from RegionSetHelper?
String[] regionWhitelist = PropertyUtil.getCommaSeparatedList("unsd.region.whitelist");
if (regionWhitelist.length == 0) {
// no whitelist -> get data for all regions
return;
}
List<String> countries = Arrays.stream(regionWhitelist).map(code -> regionMapper.find(code))
.filter(Optional::isPresent).map(Optional::get).map(c -> c.getCode(CountryRegion.Type.M49))
.collect(Collectors.toList());
for (DatasourceLayer layer : layers) {
layerAreaCodes.put(layer.getMaplayerId(), countries.toArray(new String[0]));
}
}
@Override
public Map<String, IndicatorValue> getIndicatorValues(StatisticalIndicator indicator,
StatisticalIndicatorDataModel params, StatisticalIndicatorLayer regionset) {
String[] areaCodes = layerAreaCodes.get(regionset.getOskariLayerId());
// map m49 codes back to region ids (iso2 etc) before returning
Map<String, IndicatorValue> values = indicatorValuesFetcher.get(params, indicator.getId(), areaCodes);
List<CountryRegion> regions = values.keySet().stream().map(m49 -> regionMapper.find(m49))
.filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList());
Map<String, IndicatorValue> updated = new HashMap<>();
regions.stream().forEach(c -> {
IndicatorValue value = values.get(c.getCode(CountryRegion.Type.M49_WO_LEADING));
// TODO: check if the region code from layer is iso2 or iso3 or m49
// Now always assumes iso2
updated.put(c.getCode(CountryRegion.Type.ISO2), value);
});
return updated;
}
}
| Refactor method order and names
| service-statistics-unsd/src/main/java/org/oskari/statistics/plugins/unsd/UnsdStatisticalDatasourcePlugin.java | Refactor method order and names | <ide><path>ervice-statistics-unsd/src/main/java/org/oskari/statistics/plugins/unsd/UnsdStatisticalDatasourcePlugin.java
<ide> private Map<Long, String[]> layerAreaCodes = new HashMap<>();
<ide>
<ide> @Override
<del> public void update() {
<del> // get the indicator listing
<del> UnsdRequest request = new UnsdRequest(config);
<del> request.setGoal(config.getGoal());
<del> String targetsResponse = request.getTargets();
<del> List<StatisticalIndicator> indicators = UnsdIndicatorParser.parseIndicators(targetsResponse);
<del> // Resolve indicator dimensions parallel
<del> indicators = indicators.parallelStream().map(ind -> resolveIndicatorDimensions(ind))
<del> .sorted((ind1, ind2) -> ind1.getId().compareTo(ind2.getId())).collect(Collectors.toList());
<del> // And write to cache serially to preserve sorted order
<del> indicators.forEach(ind -> onIndicatorProcessed(ind));
<del> LOG.info("Indicators handled.");
<del> }
<del>
<del> private StatisticalIndicator resolveIndicatorDimensions(StatisticalIndicator ind) {
<del> UnsdRequest request = new UnsdRequest(config);
<del> request.setGoal(config.getGoal());
<del> request.setIndicator(ind.getId());
<del> JSONObject dataResponse = JSONHelper.createJSONObject(request.getIndicatorData(null));
<del> ind.setSource(UnsdIndicatorParser.parseSource(dataResponse));
<del> // Parse indicator specific dimensions from indicator data response
<del> ind.setDataModel(UnsdIndicatorParser.parseDimensions(dataResponse));
<del> // Parse time period from indicator data responses(from all pages)
<del> ind.getDataModel().addDimension(indicatorValuesFetcher
<del> .getTimeperiodDimensionFromIndicatorData(config.getTimeVariableId(), ind.getId()));
<del> ind.getDataModel().setTimeVariable(config.getTimeVariableId());
<del> getSource().getLayers().stream().forEach(l -> ind.addLayer(l));
<del> return ind;
<del> }
<del>
<del> @Override
<ide> public void init(StatisticalDatasource source) {
<ide> super.init(source);
<ide> config = new UnsdConfig(source.getConfigJSON(), source.getId());
<ide> // optimization for getting data just for the countries we are showing
<ide> initAreaCodes(source.getLayers());
<ide> }
<add>
<add> @Override
<add> public void update() {
<add> // get the indicator listing
<add> UnsdRequest request = new UnsdRequest(config);
<add> request.setGoal(config.getGoal());
<add> String targetsResponse = request.getTargets();
<add> List<StatisticalIndicator> indicators = UnsdIndicatorParser.parseIndicators(targetsResponse);
<add> indicators = resolveDimensionsForAllIndicatorsOfGoal(indicators);
<add> // And write to cache serially to preserve sorted order
<add> indicators.forEach(ind -> onIndicatorProcessed(ind));
<add> LOG.info("Indicators handled.");
<add> }
<ide>
<del> private void initAreaCodes(List<DatasourceLayer> layers) {
<del> // TODO; Get codes from RegionSetHelper?
<del> String[] regionWhitelist = PropertyUtil.getCommaSeparatedList("unsd.region.whitelist");
<del> if (regionWhitelist.length == 0) {
<del> // no whitelist -> get data for all regions
<del> return;
<del> }
<del>
<del> List<String> countries = Arrays.stream(regionWhitelist).map(code -> regionMapper.find(code))
<del> .filter(Optional::isPresent).map(Optional::get).map(c -> c.getCode(CountryRegion.Type.M49))
<del> .collect(Collectors.toList());
<del>
<del> for (DatasourceLayer layer : layers) {
<del> layerAreaCodes.put(layer.getMaplayerId(), countries.toArray(new String[0]));
<del> }
<add> private List<StatisticalIndicator> resolveDimensionsForAllIndicatorsOfGoal(List<StatisticalIndicator> indicators) {
<add> // Resolve dimensions parallel for indicators
<add> return indicators.parallelStream().map(ind -> resolveDimensionsForSingleIndicator(ind))
<add> .sorted((ind1, ind2) -> ind1.getId().compareTo(ind2.getId())).collect(Collectors.toList());
<ide> }
<ide>
<ide> @Override
<ide>
<ide> return updated;
<ide> }
<add>
<add> private void initAreaCodes(List<DatasourceLayer> layers) {
<add> // TODO; Get codes from RegionSetHelper?
<add> String[] regionWhitelist = PropertyUtil.getCommaSeparatedList("unsd.region.whitelist");
<add> if (regionWhitelist.length == 0) {
<add> // no whitelist -> get data for all regions
<add> return;
<add> }
<add>
<add> List<String> countries = Arrays.stream(regionWhitelist).map(code -> regionMapper.find(code))
<add> .filter(Optional::isPresent).map(Optional::get).map(c -> c.getCode(CountryRegion.Type.M49))
<add> .collect(Collectors.toList());
<add>
<add> for (DatasourceLayer layer : layers) {
<add> layerAreaCodes.put(layer.getMaplayerId(), countries.toArray(new String[0]));
<add> }
<add> }
<add>
<add> private StatisticalIndicator resolveDimensionsForSingleIndicator(StatisticalIndicator ind) {
<add> UnsdRequest request = new UnsdRequest(config);
<add> request.setGoal(config.getGoal());
<add> request.setIndicator(ind.getId());
<add> JSONObject dataResponse = JSONHelper.createJSONObject(request.getIndicatorData(null));
<add> ind.setSource(UnsdIndicatorParser.parseSource(dataResponse));
<add> // Parse indicator specific dimensions from indicator data response
<add> ind.setDataModel(UnsdIndicatorParser.parseDimensions(dataResponse));
<add> // Parse time period from indicator data responses(from all pages)
<add> ind.getDataModel().addDimension(indicatorValuesFetcher
<add> .getTimeperiodDimensionFromIndicatorData(config.getTimeVariableId(), ind.getId()));
<add> ind.getDataModel().setTimeVariable(config.getTimeVariableId());
<add> getSource().getLayers().stream().forEach(l -> ind.addLayer(l));
<add> return ind;
<add> }
<ide> } |
|
JavaScript | mit | 54e5ff24cb5a075237855ac7a34ac715c0374068 | 0 | yuchi/react-titanium | import invariant from 'react/lib/invariant';
const { assign } = Object;
// Utilities
const handlerRE = /^on[A-Z]/g;
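// Splits props into event handlers ("onFoo" -> "foo") and the remaining plain view properties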
export function extractHandlers(props) {
const handlers = {};
const rest = {};
for (let key of Object.keys(props)) {
if (key.match(handlerRE)) {
handlers[ key.slice(2, 3).toLowerCase() + key.slice(3) ] = props [ key ];
}
else {
rest[ key ] = props[ key ];
}
}
return { handlers, rest };
}
// Definitions
const registry = {};
export function get(type) {
invariant(
(type in registry),
`No definition found for "${ type }"`
);
return registry[ type ];
}
export function register(shortName, apiName, config = {}) {
const definition = { shortName, apiName, ...config };
registry[ shortName ] = definition;
registry[ apiName ] = definition;
return definition;
}
export function create(type, props, handlers) {
const definition = get(type);
const view = definition.factory(props);
for (let name in handlers) {
view.addEventListener(name, handlers[ name ]);
}
return view;
}
export function update(view, props, handlers) {
// TODO: manage handlers
for (let key in props) {
let nextValue = props[key];
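    // Special-case 'value': skip the assignment when it has not actually changed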
if (key === 'value') {
let oldValue = view[key];
if (nextValue === oldValue) {
continue;
}
}
view[key] = nextValue;
}
// view.applyProperties(props);
}
export function updateChildren(view, children) {
view.removeAllChildren();
// NOTE: Sloooooooow...
let i = 0;
let l = children.length;
for (; i < l; ++i) {
if (children[i]) {
view.add(children[i]);
}
}
}
// Built-ins
register("window", "Titanium.UI.Window", {
factory: props => Titanium.UI.createWindow(props)
});
register("view", "Titanium.UI.View", {
factory: props => Titanium.UI.createView(props)
});
register("button", "Titanium.UI.Button", {
factory: props => Titanium.UI.createButton(props)
});
register("input", "Titanium.UI.TextField", {
factory: props => Titanium.UI.createTextField(props)
});
register("textarea", "Titanium.UI.TextArea", {
factory: props => Titanium.UI.createTextArea(props)
});
register("label", "Titanium.UI.Label", {
factory: props => Titanium.UI.createLabel(props)
});
register("list", "Titanium.UI.ListView", {
factory: props => Titanium.UI.createListView(props)
});
register("switch", "Titanium.UI.Switch", {
factory: props => Titanium.UI.createSwitch(props)
});
register("optiondialog", "Titanium.UI.OptionDialog", {
factory: props => Titanium.UI.OptionDialog(props)
});
register("slider", "Titanium.UI.Slider", {
factory: props => Titanium.UI.Slider(props)
});
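// Note: the other factories use the Ti.UI.create<Type>(props) form; the optiondialog and slider entries above
// call the type directly instead (Titanium.UI.createOptionDialog / createSlider would be the create-style equivalents).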
register("root", "react.titanium.Root", {
factory: props => ({
apiName: "react.titanium.Root",
children: [],
add(child) {
this.children.push(child);
},
open(props) {
for (let child of this.children) {
child.open(props);
}
}
})
});
| src/lib/ReactTitaniumBridge.js | import invariant from 'react/lib/invariant';
const { assign } = Object;
// Utilities
const handlerRE = /^on[A-Z]/g;
export function extractHandlers(props) {
const handlers = {};
const rest = {};
for (let key of Object.keys(props)) {
if (key.match(handlerRE)) {
handlers[ key.slice(2, 3).toLowerCase() + key.slice(3) ] = props [ key ];
}
else {
rest[ key ] = props[ key ];
}
}
return { handlers, rest };
}
// Definitions
const registry = {};
export function get(type) {
invariant(
(type in registry),
`No definition found for "${ type }"`
);
return registry[ type ];
}
export function register(shortName, apiName, config = {}) {
const definition = { shortName, apiName, ...config };
registry[ shortName ] = definition;
registry[ apiName ] = definition;
return definition;
}
export function create(type, props, handlers) {
const definition = get(type);
const view = definition.factory(props);
for (let name in handlers) {
view.addEventListener(name, handlers[ name ]);
}
return view;
}
export function update(view, props, handlers) {
// TODO: manage handlers
for (let key in props) {
let nextValue = props[key];
if (key === 'value') {
let oldValue = view[key];
if (nextValue === oldValue) {
continue;
}
}
view[key] = nextValue;
}
// view.applyProperties(props);
}
export function updateChildren(view, children) {
view.removeAllChildren();
// NOTE: Sloooooooow...
let i = 0;
let l = children.length;
for (; i < l; ++i) {
if (children[i]) {
view.add(children[i]);
}
}
}
// Built-ins
register("window", "Titanium.UI.Window", {
factory: props => Titanium.UI.createWindow(props)
});
register("view", "Titanium.UI.View", {
factory: props => Titanium.UI.createView(props)
});
register("button", "Titanium.UI.Button", {
factory: props => Titanium.UI.createButton(props)
});
register("input", "Titanium.UI.TextField", {
factory: props => Titanium.UI.createTextField(props)
});
register("textarea", "Titanium.UI.TextArea", {
factory: props => Titanium.UI.createTextArea(props)
});
register("label", "Titanium.UI.Label", {
factory: props => Titanium.UI.createLabel(props)
});
register("list", "Titanium.UI.ListView", {
factory: props => Titanium.UI.createListView(props)
});
register("root", "react.titanium.Root", {
factory: props => ({
apiName: "react.titanium.Root",
children: [],
add(child) {
this.children.push(child);
},
open(props) {
for (let child of this.children) {
child.open(props);
}
}
})
});
| added slider, optiondialog, and switch
| src/lib/ReactTitaniumBridge.js | added slider, optiondialog, and switch | <ide><path>rc/lib/ReactTitaniumBridge.js
<ide> factory: props => Titanium.UI.createListView(props)
<ide> });
<ide>
<add>register("switch", "Titanium.UI.Switch", {
<add> factory: props => Titanium.UI.createSwitch(props)
<add>});
<add>
<add>register("optiondialog", "Titanium.UI.OptionDialog", {
<add> factory: props => Titanium.UI.OptionDialog(props)
<add>});
<add>
<add>register("slider", "Titanium.UI.Slider", {
<add> factory: props => Titanium.UI.Slider(props)
<add>});
<add>
<ide> register("root", "react.titanium.Root", {
<ide> factory: props => ({
<ide> apiName: "react.titanium.Root", |
|
Java | apache-2.0 | error: pathspec 'autosize/src/main/java/me/jessyan/autosize/unit/UnitsManager.java' did not match any file(s) known to git
| dace5efb4d2fce57adc352a82085704e0297d046 | 1 | JessYanCoding/AndroidAutoSize | /*
* Copyright 2018 JessYan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.jessyan.autosize.unit;
import android.util.DisplayMetrics;
import me.jessyan.autosize.utils.Preconditions;
/**
* ================================================
 * Manages all the units supported by AndroidAutoSize; AndroidAutoSize supports five units (dp, sp, pt, in, mm)
 * dp and sp are the two common units and serve as the main units of AndroidAutoSize, supported by default
 * pt, in and mm are the three uncommon units; only one of them can be selected as the subunit of AndroidAutoSize, supported together with dp and sp
 * The subunit is used to avoid the side effects that modifying {@link DisplayMetrics#density} has on other system controls or third-party library controls laid out in dp
 * Whichever unit you choose is the unit you use in your layout files
 * <p>
 * Support for the two main units and the one subunit can be switched off and back on at any time with the methods below
 * If you want to completely avoid the side effects that modifying {@link DisplayMetrics#density} has on other system controls or third-party library controls laid out in dp
 * then call {@link #setSupportDP} and {@link #setSupportSP} with {@code false} to stop supporting the two main units (if sp stays enabled the impact on third-party controls is small, so you may keep sp on)
 * and call {@link #setSupportSubunits} to choose one of the three uncommon units as the subunit (all three behave the same, so pick whichever you like)
 * then use only that subunit in your layout files, which completely avoids the problems caused by modifying {@link DisplayMetrics#density}
 * because dp and sp are very common in other system controls and third-party library controls, while the three uncommon units are rarely seen
* <p>
* Created by JessYan on 2018/8/28 10:21
* <a href="mailto:[email protected]">Contact me</a>
* <a href="https://github.com/JessYanCoding">Follow me</a>
* ================================================
*/
public class UnitsManager {
/**
     * Whether the dp unit is supported; supported by default
*/
private boolean isSupportDP = true;
/**
     * Whether the sp unit is supported; supported by default
*/
private boolean isSupportSP = true;
/**
     * Whether a subunit is supported, and which unit is used as the subunit; not supported by default
*/
private Subunits mSupportSubunits = Subunits.NONE;
/**
     * Whether the dp unit is supported; supported by default, see the class comment of {@link UnitsManager} for details
     *
     * @return {@code true} if supported, {@code false} otherwise
*/
public boolean isSupportDP() {
return isSupportDP;
}
/**
     * Whether AndroidAutoSize should support the dp unit; supported by default, see the class comment of {@link UnitsManager} for details
     *
     * @param supportDP {@code true} to support it, {@code false} not to
*/
public void setSupportDP(boolean supportDP) {
isSupportDP = supportDP;
}
/**
     * Whether the sp unit is supported; supported by default, see the class comment of {@link UnitsManager} for details
     *
     * @return {@code true} if supported, {@code false} otherwise
*/
public boolean isSupportSP() {
return isSupportSP;
}
/**
     * Whether AndroidAutoSize should support the sp unit; supported by default, see the class comment of {@link UnitsManager} for details
     *
     * @param supportSP {@code true} to support it, {@code false} not to
*/
public void setSupportSP(boolean supportSP) {
isSupportSP = supportSP;
}
/**
     * Which unit AndroidAutoSize uses as the subunit; defaults to {@link Subunits#NONE}, i.e. no subunit, see the class comment of {@link UnitsManager} for details
*
* @return {@link Subunits}
*/
public Subunits getSupportSubunits() {
return mSupportSubunits;
}
/**
     * Sets which unit AndroidAutoSize uses as the subunit; pick one of the three uncommon units pt, in or mm, all three behave the same
     * so choose whichever you prefer (the author happens to like mm)
     * Defaults to {@link Subunits#NONE}, i.e. no subunit, see the class comment of {@link UnitsManager} for details
*
* @param supportSubunits {@link Subunits}
*/
public void setSupportSubunits(Subunits supportSubunits) {
mSupportSubunits = Preconditions.checkNotNull(supportSubunits,
"The supportSubunits can not be null, use Subunits.NONE instead");
}
}
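A possible usage sketch for the class above (the AutoSizeConfig accessor and the Subunits.MM constant are assumptions for illustration; only the methods shown in this file come from the commit):

    UnitsManager units = AutoSizeConfig.getInstance().getUnitsManager(); // assumed accessor, not defined in this file
    units.setSupportDP(false);             // stop adapting dp, so dp-based third-party controls are unaffected
    units.setSupportSP(false);             // optionally stop adapting sp as well
    units.setSupportSubunits(Subunits.MM); // from then on lay out with mm (or PT / IN) in layout files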
| autosize/src/main/java/me/jessyan/autosize/unit/UnitsManager.java | Create UnitsManager
| autosize/src/main/java/me/jessyan/autosize/unit/UnitsManager.java | Create UnitsManager | <ide><path>utosize/src/main/java/me/jessyan/autosize/unit/UnitsManager.java
<add>/*
<add> * Copyright 2018 JessYan
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>package me.jessyan.autosize.unit;
<add>
<add>import android.util.DisplayMetrics;
<add>
<add>import me.jessyan.autosize.utils.Preconditions;
<add>
<add>/**
<add> * ================================================
<add> * 管理 AndroidAutoSize 支持的所有单位, AndroidAutoSize 支持五种单位 (dp、sp、pt、in、mm)
<add> * 其中 dp、sp 这两个是比较常见的单位, 作为 AndroidAutoSize 的主单位, 默认被 AndroidAutoSize 支持
<add> * pt、in、mm 这三个是比较少见的单位, 只可以选择其中的一个, 作为 AndroidAutoSize 的副单位, 与 dp、sp 一起被 AndroidAutoSize 支持
<add> * 副单位是用于规避修改 {@link DisplayMetrics#density} 所带来的对于其他使用 dp 布局的系统控件或三方库控件的不良影响
<add> * 你选择什么单位, 就在 layout 文件中用什么单位布局
<add> * <p>
<add> * 两个主单位和一个副单位, 可以随时使用下面的方法关闭和重新开启对它们的支持
<add> * 如果你想完全规避修改 {@link DisplayMetrics#density} 所带来的对于其他使用 dp 布局的系统控件或三方库控件的不良影响
<add> * 那请调用 {@link #setSupportDP}、{@link #setSupportSP} 都设置为 {@code false}, 停止对两个主单位的支持 (如果开启 sp, 对其他三方库控件影响不大, 也可以不关闭对 sp 的支持)
<add> * 并调用 {@link #setSupportSubunits} 从三个冷门单位中选择一个作为副单位 (三个单位的效果都是一样的, 按自己的喜好选择, 比如我就喜欢 mm, 翻译为中文是妹妹的意思)
<add> * 然后在 layout 文件中只使用这个副单位进行布局, 这样就可以完全规避修改 {@link DisplayMetrics#density} 所带来的问题
<add> * 因为 dp、sp 这两个单位在其他系统控件或三方库控件中都非常常见, 但三个冷门单位却非常少见
<add> * <p>
<add> * Created by JessYan on 2018/8/28 10:21
<add> * <a href="mailto:[email protected]">Contact me</a>
<add> * <a href="https://github.com/JessYanCoding">Follow me</a>
<add> * ================================================
<add> */
<add>public class UnitsManager {
<add> /**
<add> * 是否支持 dp 单位, 默认支持
<add> */
<add> private boolean isSupportDP = true;
<add> /**
<add> * 是否支持 sp 单位, 默认支持
<add> */
<add> private boolean isSupportSP = true;
<add> /**
<add> * 是否支持副单位, 以什么为副单位? 默认不支持
<add> */
<add> private Subunits mSupportSubunits = Subunits.NONE;
<add>
<add> /**
<add> * 是否支持 dp 单位, 默认支持, 详情请看类文件的注释 {@link UnitsManager}
<add> *
<add> * @return {@code true} 为支持, {@code false} 为不支持
<add> */
<add> public boolean isSupportDP() {
<add> return isSupportDP;
<add> }
<add>
<add> /**
<add> * 是否让 AndroidAutoSize 支持 dp 单位, 默认支持, 详情请看类文件的注释 {@link UnitsManager}
<add> *
<add> * @param supportDP {@code true} 为支持, {@code false} 为不支持
<add> */
<add> public void setSupportDP(boolean supportDP) {
<add> isSupportDP = supportDP;
<add> }
<add>
<add> /**
<add> * 是否支持 sp 单位, 默认支持, 详情请看类文件的注释 {@link UnitsManager}
<add> *
<add> * @return {@code true} 为支持, {@code false} 为不支持
<add> */
<add> public boolean isSupportSP() {
<add> return isSupportSP;
<add> }
<add>
<add> /**
<add> * 是否让 AndroidAutoSize 支持 sp 单位, 默认支持, 详情请看类文件的注释 {@link UnitsManager}
<add> *
<add> * @param supportSP {@code true} 为支持, {@code false} 为不支持
<add> */
<add> public void setSupportSP(boolean supportSP) {
<add> isSupportSP = supportSP;
<add> }
<add>
<add> /**
<add> * AndroidAutoSize 以什么单位为副单位, 默认为 {@link Subunits#NONE}, 即不支持副单位, 详情请看类文件的注释 {@link UnitsManager}
<add> *
<add> * @return {@link Subunits}
<add> */
<add> public Subunits getSupportSubunits() {
<add> return mSupportSubunits;
<add> }
<add>
<add> /**
<add> * 让 AndroidAutoSize 以什么单位为副单位, 在 pt、in、mm 这三个冷门单位中选择一个即可, 三个效果都是一样的
<add> * 按自己的喜好选择, 比如我就喜欢 mm, 翻译为中文是妹妹的意思
<add> * 默认为 {@link Subunits#NONE}, 即不支持副单位, 详情请看类文件的注释 {@link UnitsManager}
<add> *
<add> * @param supportSubunits {@link Subunits}
<add> */
<add> public void setSupportSubunits(Subunits supportSubunits) {
<add> mSupportSubunits = Preconditions.checkNotNull(supportSubunits,
<add> "The supportSubunits can not be null, use Subunits.NONE instead");
<add> }
<add>} |
|
Java | mpl-2.0 | b97c5d49819f22ba2368e39900d9f55c5955c04a | 0 | kckc/openmrs-core,jcantu1988/openmrs-core,WANeves/openmrs-core,prisamuel/openmrs-core,sravanthi17/openmrs-core,shiangree/openmrs-core,jamesfeshner/openmrs-module,Openmrs-joel/openmrs-core,WANeves/openmrs-core,maekstr/openmrs-core,jvena1/openmrs-core,joansmith/openmrs-core,foolchan2556/openmrs-core,kabariyamilind/openMRSDEV,alexwind26/openmrs-core,MuhammadSafwan/Stop-Button-Ability,alexwind26/openmrs-core,MitchellBot/openmrs-core,Bhamni/openmrs-core,Winbobob/openmrs-core,ssmusoke/openmrs-core,siddharthkhabia/openmrs-core,alexei-grigoriev/openmrs-core,naraink/openmrs-core,kigsmtua/openmrs-core,dcmul/openmrs-core,michaelhofer/openmrs-core,kabariyamilind/openMRSDEV,Ch3ck/openmrs-core,sintjuri/openmrs-core,MuhammadSafwan/Stop-Button-Ability,iLoop2/openmrs-core,spereverziev/openmrs-core,Ch3ck/openmrs-core,kristopherschmidt/openmrs-core,milankarunarathne/openmrs-core,sravanthi17/openmrs-core,shiangree/openmrs-core,milankarunarathne/openmrs-core,WANeves/openmrs-core,macorrales/openmrs-core,jcantu1988/openmrs-core,asifur77/openmrs,geoff-wasilwa/openmrs-core,koskedk/openmrs-core,aboutdata/openmrs-core,sadhanvejella/openmrs,ern2/openmrs-core,foolchan2556/openmrs-core,joansmith/openmrs-core,maany/openmrs-core,hoquangtruong/TestMylyn,naraink/openmrs-core,kckc/openmrs-core,lbl52001/openmrs-core,alexei-grigoriev/openmrs-core,shiangree/openmrs-core,kigsmtua/openmrs-core,lbl52001/openmrs-core,Ch3ck/openmrs-core,lbl52001/openmrs-core,aboutdata/openmrs-core,iLoop2/openmrs-core,nilusi/Legacy-UI,hoquangtruong/TestMylyn,Openmrs-joel/openmrs-core,kristopherschmidt/openmrs-core,kabariyamilind/openMRSDEV,spereverziev/openmrs-core,donaldgavis/openmrs-core,aboutdata/openmrs-core,maekstr/openmrs-core,WANeves/openmrs-core,jvena1/openmrs-core,asifur77/openmrs,Bhamni/openmrs-core,nilusi/Legacy-UI,preethi29/openmrs-core,joansmith/openmrs-core,maany/openmrs-core,michaelhofer/openmrs-core,iLoop2/openmrs-core,chethandeshpande/openmrs-core,iLoop2/openmrs-core,andyvand/OpenMRS,milankarunarathne/openmrs-core,pselle/openmrs-core,Negatu/openmrs-core,Negatu/openmrs-core,michaelhofer/openmrs-core,dcmul/openmrs-core,hoquangtruong/TestMylyn,nilusi/Legacy-UI,vinayvenu/openmrs-core,foolchan2556/openmrs-core,iLoop2/openmrs-core,siddharthkhabia/openmrs-core,spereverziev/openmrs-core,preethi29/openmrs-core,pselle/openmrs-core,prisamuel/openmrs-core,vinayvenu/openmrs-core,Openmrs-joel/openmrs-core,milankarunarathne/openmrs-core,ssmusoke/openmrs-core,jvena1/openmrs-core,Bhamni/openmrs-core,jembi/openmrs-core,dlahn/openmrs-core,geoff-wasilwa/openmrs-core,Negatu/openmrs-core,rbtracker/openmrs-core,shiangree/openmrs-core,lbl52001/openmrs-core,jembi/openmrs-core,sravanthi17/openmrs-core,milankarunarathne/openmrs-core,maany/openmrs-core,alexei-grigoriev/openmrs-core,rbtracker/openmrs-core,kabariyamilind/openMRSDEV,hoquangtruong/TestMylyn,jvena1/openmrs-core,prisamuel/openmrs-core,maekstr/openmrs-core,naraink/openmrs-core,michaelhofer/openmrs-core,sadhanvejella/openmrs,aj-jaswanth/openmrs-core,alexei-grigoriev/openmrs-core,alexei-grigoriev/openmrs-core,nilusi/Legacy-UI,preethi29/openmrs-core,jembi/openmrs-core,shiangree/openmrs-core,ern2/openmrs-core,sintjuri/openmrs-core,ldf92/openmrs-core,lilo2k/openmrs-core,sintjuri/openmrs-core,ern2/openmrs-core,andyvand/OpenMRS,kabariyamilind/openMRSDEV,ssmusoke/openmrs-core,Bhamni/openmrs-core,ssmusoke/openmrs-core,WANeves/openmrs-core,geoff-wasilwa/openmrs-core,jamesfeshner/openmrs-module,asifur77/openmrs,Ch3ck/openmrs-core,trsorsimoII/openmrs
-core,ldf92/openmrs-core,MuhammadSafwan/Stop-Button-Ability,asifur77/openmrs,sintjuri/openmrs-core,spereverziev/openmrs-core,kristopherschmidt/openmrs-core,koskedk/openmrs-core,sravanthi17/openmrs-core,sintjuri/openmrs-core,siddharthkhabia/openmrs-core,kigsmtua/openmrs-core,Winbobob/openmrs-core,kigsmtua/openmrs-core,macorrales/openmrs-core,lilo2k/openmrs-core,rbtracker/openmrs-core,AbhijitParate/openmrs-core,kristopherschmidt/openmrs-core,chethandeshpande/openmrs-core,MitchellBot/openmrs-core,trsorsimoII/openmrs-core,geoff-wasilwa/openmrs-core,lilo2k/openmrs-core,AbhijitParate/openmrs-core,dcmul/openmrs-core,spereverziev/openmrs-core,sadhanvejella/openmrs,ldf92/openmrs-core,foolchan2556/openmrs-core,alexwind26/openmrs-core,maany/openmrs-core,preethi29/openmrs-core,AbhijitParate/openmrs-core,koskedk/openmrs-core,aboutdata/openmrs-core,shiangree/openmrs-core,MitchellBot/openmrs-core,hoquangtruong/TestMylyn,macorrales/openmrs-core,Negatu/openmrs-core,maekstr/openmrs-core,maekstr/openmrs-core,prisamuel/openmrs-core,jamesfeshner/openmrs-module,aboutdata/openmrs-core,AbhijitParate/openmrs-core,dlahn/openmrs-core,lilo2k/openmrs-core,sintjuri/openmrs-core,kigsmtua/openmrs-core,donaldgavis/openmrs-core,prisamuel/openmrs-core,vinayvenu/openmrs-core,donaldgavis/openmrs-core,koskedk/openmrs-core,preethi29/openmrs-core,kristopherschmidt/openmrs-core,jcantu1988/openmrs-core,MitchellBot/openmrs-core,jvena1/openmrs-core,maany/openmrs-core,MuhammadSafwan/Stop-Button-Ability,kigsmtua/openmrs-core,trsorsimoII/openmrs-core,aj-jaswanth/openmrs-core,Bhamni/openmrs-core,AbhijitParate/openmrs-core,donaldgavis/openmrs-core,iLoop2/openmrs-core,foolchan2556/openmrs-core,alexei-grigoriev/openmrs-core,sravanthi17/openmrs-core,aj-jaswanth/openmrs-core,Winbobob/openmrs-core,donaldgavis/openmrs-core,MuhammadSafwan/Stop-Button-Ability,vinayvenu/openmrs-core,jcantu1988/openmrs-core,milankarunarathne/openmrs-core,geoff-wasilwa/openmrs-core,pselle/openmrs-core,Winbobob/openmrs-core,Negatu/openmrs-core,andyvand/OpenMRS,dlahn/openmrs-core,alexwind26/openmrs-core,chethandeshpande/openmrs-core,andyvand/OpenMRS,dlahn/openmrs-core,jamesfeshner/openmrs-module,dlahn/openmrs-core,jcantu1988/openmrs-core,lbl52001/openmrs-core,naraink/openmrs-core,joansmith/openmrs-core,kckc/openmrs-core,nilusi/Legacy-UI,rbtracker/openmrs-core,Winbobob/openmrs-core,hoquangtruong/TestMylyn,MuhammadSafwan/Stop-Button-Ability,siddharthkhabia/openmrs-core,ern2/openmrs-core,trsorsimoII/openmrs-core,chethandeshpande/openmrs-core,ldf92/openmrs-core,koskedk/openmrs-core,naraink/openmrs-core,ssmusoke/openmrs-core,siddharthkhabia/openmrs-core,sadhanvejella/openmrs,koskedk/openmrs-core,alexwind26/openmrs-core,jembi/openmrs-core,macorrales/openmrs-core,lilo2k/openmrs-core,foolchan2556/openmrs-core,chethandeshpande/openmrs-core,dcmul/openmrs-core,aj-jaswanth/openmrs-core,trsorsimoII/openmrs-core,lbl52001/openmrs-core,asifur77/openmrs,jembi/openmrs-core,Openmrs-joel/openmrs-core,sadhanvejella/openmrs,dcmul/openmrs-core,macorrales/openmrs-core,siddharthkhabia/openmrs-core,WANeves/openmrs-core,aj-jaswanth/openmrs-core,Ch3ck/openmrs-core,lilo2k/openmrs-core,kckc/openmrs-core,andyvand/OpenMRS,andyvand/OpenMRS,dcmul/openmrs-core,sadhanvejella/openmrs,prisamuel/openmrs-core,ern2/openmrs-core,jamesfeshner/openmrs-module,spereverziev/openmrs-core,pselle/openmrs-core,kckc/openmrs-core,Openmrs-joel/openmrs-core,maekstr/openmrs-core,aboutdata/openmrs-core,Winbobob/openmrs-core,michaelhofer/openmrs-core,vinayvenu/openmrs-core,joansmith/openmrs-core,pselle/openmrs-core,naraink/o
penmrs-core,kckc/openmrs-core,Negatu/openmrs-core,MitchellBot/openmrs-core,nilusi/Legacy-UI,AbhijitParate/openmrs-core,pselle/openmrs-core,jembi/openmrs-core,rbtracker/openmrs-core,ldf92/openmrs-core | /**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Scanner;
import java.util.Set;
import liquibase.Liquibase;
import liquibase.changelog.ChangeLogIterator;
import liquibase.changelog.ChangeLogParameters;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.changelog.filter.ContextChangeSetFilter;
import liquibase.changelog.filter.DbmsChangeSetFilter;
import liquibase.changelog.filter.ShouldRunChangeSetFilter;
import liquibase.changelog.visitor.UpdateVisitor;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.LiquibaseException;
import liquibase.exception.LockException;
import liquibase.lockservice.LockService;
import liquibase.parser.core.xml.XMLChangeLogSAXParser;
import liquibase.resource.CompositeResourceAccessor;
import liquibase.resource.FileSystemResourceAccessor;
import liquibase.resource.ResourceAccessor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openmrs.annotation.Authorized;
import org.openmrs.api.context.Context;
/**
* This class uses Liquibase to update the database. <br/>
* <br/>
* See /metadata/model/liquibase-update-to-latest.xml for the changes. This class will also run
* arbitrary liquibase xml files on the associated database. Details for the database are
* taken from the openmrs runtime properties.
*
* @since 1.5
*/
public class DatabaseUpdater {
private static Log log = LogFactory.getLog(DatabaseUpdater.class);
private static final String CHANGE_LOG_FILE = "liquibase-update-to-latest.xml";
private static final String CONTEXT = "core";
public static final String DATABASE_UPDATES_LOG_FILE = "liquibaseUpdateLogs.txt";
private static Integer authenticatedUserId;
/**
* Holds the update warnings generated by the custom liquibase changesets as they are executed
*/
private static List<String> updateWarnings = null;
/**
* Convenience method to run the changesets using Liquibase to bring the database up to a
* version compatible with the code
*
* @throws InputRequiredException if the changelog file requires some sort of user input. The
* error object will list the user prompts and the type of data for each prompt
* @see #update(Map)
* @see #executeChangelog(String, Map)
*/
public static void executeChangelog() throws DatabaseUpdateException, InputRequiredException {
executeChangelog(null, null);
}
/**
* Run changesets on database using Liquibase to get the database up to the most recent version
*
* @param changelog the liquibase changelog file to use (or null to use the default file)
* @param userInput nullable map from question to user answer. Used if a call to update(null)
* threw an {@link InputRequiredException}
* @throws DatabaseUpdateException
* @throws InputRequiredException
*/
public static void executeChangelog(String changelog, Map<String, Object> userInput) throws DatabaseUpdateException,
InputRequiredException {
log.debug("Executing changelog: " + changelog);
executeChangelog(changelog, userInput, null);
}
/**
* Interface used for callbacks when updating the database. Implement this interface and pass it
* to {@link DatabaseUpdater#executeChangelog(String, Map, ChangeSetExecutorCallback)}
*/
public interface ChangeSetExecutorCallback {
/**
* This method is called after each changeset is executed.
*
* @param changeSet the liquibase changeset that was just run
* @param numChangeSetsToRun the total number of changesets in the current file
*/
public void executing(ChangeSet changeSet, int numChangeSetsToRun);
}
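/*
 * Illustrative sketch, not part of the original source: a minimal
 * ChangeSetExecutorCallback that reports progress and is handed to
 * executeChangelog(String, Map, ChangeSetExecutorCallback). The variable name
 * and log message are assumptions for demonstration only; exception handling
 * for DatabaseUpdateException/InputRequiredException is omitted.
 *
 * ChangeSetExecutorCallback progressCallback = new ChangeSetExecutorCallback() {
 *     public void executing(ChangeSet changeSet, int numChangeSetsToRun) {
 *         log.info("Executing changeset " + changeSet.getId() + " (" + numChangeSetsToRun + " in this file)");
 *     }
 * };
 * List<String> warnings = DatabaseUpdater.executeChangelog(null, null, progressCallback);
 */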
/**
* Executes the given changelog file. This file is assumed to be on the classpath. If no file is
* given, the default {@link #CHANGE_LOG_FILE} is run.
*
* @param changelog The string filename of a liquibase changelog xml file to run
* @param userInput nullable map from question to user answer. Used if a call to
* executeChangelog(<String>, null) threw an {@link InputRequiredException}
* @return A list of messages or warnings generated by the executed changesets
* @throws InputRequiredException if the changelog file requires some sort of user input. The
* error object will list the user prompts and the type of data for each prompt
*/
public static List<String> executeChangelog(String changelog, Map<String, Object> userInput,
ChangeSetExecutorCallback callback) throws DatabaseUpdateException, InputRequiredException {
log.debug("installing the tables into the database");
if (changelog == null)
changelog = CHANGE_LOG_FILE;
try {
return executeChangelog(changelog, CONTEXT, userInput, callback);
}
catch (Exception e) {
throw new DatabaseUpdateException("There was an error while updating the database to the latest. file: "
+ changelog + ". Error: " + e.getMessage(), e);
}
}
/**
* @deprecated use
* {@link #executeChangelog(String, String, Map, ChangeSetExecutorCallback, ClassLoader)}
*/
@Deprecated
public static List<String> executeChangelog(String changeLogFile, String contexts, Map<String, Object> userInput,
ChangeSetExecutorCallback callback) throws Exception {
return executeChangelog(changeLogFile, contexts, userInput, callback, null);
}
/**
* This code was borrowed from the liquibase jar so that we can call the given callback
* function.
*
* @param changeLogFile the file to execute
* @param contexts the liquibase changeset context
* @param userInput answers given by the user
* @param callback the function to call after every changeset
* @param cl {@link ClassLoader} to use to find the changeLogFile (or null to use
* {@link OpenmrsClassLoader})
* @return A list of messages or warnings generated by the executed changesets
* @throws Exception
*/
public static List<String> executeChangelog(String changeLogFile, String contexts, Map<String, Object> userInput,
ChangeSetExecutorCallback callback, ClassLoader cl) throws Exception {
final class OpenmrsUpdateVisitor extends UpdateVisitor {
private ChangeSetExecutorCallback callback;
private int numChangeSetsToRun;
public OpenmrsUpdateVisitor(Database database, ChangeSetExecutorCallback callback, int numChangeSetsToRun) {
super(database);
this.callback = callback;
this.numChangeSetsToRun = numChangeSetsToRun;
}
@Override
public void visit(ChangeSet changeSet, DatabaseChangeLog databaseChangeLog, Database database)
throws LiquibaseException {
if (callback != null)
callback.executing(changeSet, numChangeSetsToRun);
super.visit(changeSet, databaseChangeLog, database);
}
}
if (cl == null)
cl = OpenmrsClassLoader.getInstance();
log.debug("Setting up liquibase object to run changelog: " + changeLogFile);
Liquibase liquibase = getLiquibase(changeLogFile, cl);
int numChangeSetsToRun = liquibase.listUnrunChangeSets(contexts).size();
Database database = null;
LockService lockHandler = null;
try {
database = liquibase.getDatabase();
lockHandler = LockService.getInstance(database);
lockHandler.waitForLock();
ResourceAccessor openmrsFO = new ClassLoaderFileOpener(cl);
ResourceAccessor fsFO = new FileSystemResourceAccessor();
DatabaseChangeLog changeLog = new XMLChangeLogSAXParser().parse(changeLogFile, new ChangeLogParameters(),
new CompositeResourceAccessor(openmrsFO, fsFO));
changeLog.validate(database);
ChangeLogIterator logIterator = new ChangeLogIterator(changeLog, new ShouldRunChangeSetFilter(database),
new ContextChangeSetFilter(contexts), new DbmsChangeSetFilter(database));
database.checkDatabaseChangeLogTable(true, changeLog, new String[] { contexts });
logIterator.run(new OpenmrsUpdateVisitor(database, callback, numChangeSetsToRun), database);
}
catch (LiquibaseException e) {
throw e;
}
finally {
try {
lockHandler.releaseLock();
}
catch (Throwable t) {
log.error("Could not release lock", t);
}
try {
database.getConnection().close();
}
catch (Throwable t) {
//pass
}
}
return updateWarnings;
}
/**
* Ask Liquibase if it needs to do any updates. Only looks at the {@link #CHANGE_LOG_FILE}
*
* @return true/false whether database updates are required
* @should always have a valid update to latest file
*/
public static boolean updatesRequired() throws Exception {
log.debug("checking for updates");
List<OpenMRSChangeSet> changesets = getUnrunDatabaseChanges();
// if the db is locked, it means there was a crash
// or someone is executing db updates right now. either way
// returning true here stops the openmrs startup and shows
// the user the maintenance wizard for updates
if (isLocked() && changesets.size() == 0) {
// if there is a db lock but there are no db changes we undo the
// lock
DatabaseUpdater.releaseDatabaseLock();
log.debug("db lock found and released automatically");
return false;
}
return changesets.size() > 0;
}
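/*
 * Illustrative sketch, an assumption rather than code from this class: how
 * startup logic might combine updatesRequired() with allowAutoUpdate() to
 * either apply the changelog automatically or defer to the maintenance wizard
 * mentioned in the comments above. showMaintenanceWizard() is a hypothetical
 * hand-off, not an OpenMRS API, and exception handling is omitted.
 *
 * if (DatabaseUpdater.updatesRequired()) {
 *     if (DatabaseUpdater.allowAutoUpdate())
 *         DatabaseUpdater.executeChangelog();
 *     else
 *         showMaintenanceWizard();
 * }
 */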
/**
* Ask Liquibase if it needs to do any updates
*
* @param changeLogFilenames the filenames of all files to search for unrun changesets
* @return true/false whether database updates are required
* @should always have a valid update to latest file
*/
public static boolean updatesRequired(String... changeLogFilenames) throws Exception {
log.debug("checking for updates");
List<OpenMRSChangeSet> changesets = getUnrunDatabaseChanges(changeLogFilenames);
return changesets.size() > 0;
}
/**
* Indicates whether automatic database updates are allowed by this server. Automatic updates
* are disabled by default. In order to enable automatic updates, the admin needs to add
* 'auto_update_database=true' to the runtime properties file.
*
* @return true/false whether the 'auto_update_database' has been enabled.
*/
public static Boolean allowAutoUpdate() {
String allowAutoUpdate = Context.getRuntimeProperties().getProperty(
OpenmrsConstants.AUTO_UPDATE_DATABASE_RUNTIME_PROPERTY, "false");
return "true".equals(allowAutoUpdate);
}
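/*
 * Example runtime properties entry that an administrator would add to enable
 * automatic updates, using the property name given in the javadoc above:
 *
 * auto_update_database=true
 */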
/**
* Takes the default properties defined in /metadata/api/hibernate/hibernate.default.properties
* and merges them into the user-defined runtime properties
*
* @see org.openmrs.api.db.ContextDAO#mergeDefaultRuntimeProperties(java.util.Properties)
*/
private static void mergeDefaultRuntimeProperties(Properties runtimeProperties) {
// loop over runtime properties and precede each with "hibernate" if
// it isn't already
Set<Object> runtimePropertyKeys = new HashSet<Object>();
runtimePropertyKeys.addAll(runtimeProperties.keySet()); // must do it this way to prevent concurrent mod errors
for (Object key : runtimePropertyKeys) {
String prop = (String) key;
String value = (String) runtimeProperties.get(key);
log.trace("Setting property: " + prop + ":" + value);
if (!prop.startsWith("hibernate") && !runtimeProperties.containsKey("hibernate." + prop))
runtimeProperties.setProperty("hibernate." + prop, value);
}
// load in the default hibernate properties from hibernate.default.properties
InputStream propertyStream = null;
try {
Properties props = new Properties();
// TODO: This is a dumb requirement to have hibernate in here. Clean this up
propertyStream = DatabaseUpdater.class.getClassLoader().getResourceAsStream("hibernate.default.properties");
OpenmrsUtil.loadProperties(props, propertyStream);
// add in all default properties that don't exist in the runtime
// properties yet
for (Map.Entry<Object, Object> entry : props.entrySet()) {
if (!runtimeProperties.containsKey(entry.getKey()))
runtimeProperties.put(entry.getKey(), entry.getValue());
}
}
finally {
try {
propertyStream.close();
}
catch (Throwable t) {
// pass
}
}
}
/**
* Get a connection to the database through Liquibase. The calling method /must/ close the
* database connection when finished with this Liquibase object.
* liquibase.getDatabase().getConnection().close()
*
* @param changeLogFile the name of the file to look for on the classpath or filesystem
* @param cl the {@link ClassLoader} to use to find the file (or null to use
* {@link OpenmrsClassLoader})
* @return Liquibase object based on the current connection settings
* @throws Exception
*/
private static Liquibase getLiquibase(String changeLogFile, ClassLoader cl) throws Exception {
Connection connection = null;
try {
connection = getConnection();
}
catch (SQLException e) {
throw new Exception(
"Unable to get a connection to the database. Please check your openmrs runtime properties file and make sure you have the correct connection.username and connection.password set",
e);
}
if (cl == null)
cl = OpenmrsClassLoader.getInstance();
try {
Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(
new JdbcConnection(connection));
database.setDatabaseChangeLogTableName("liquibasechangelog");
database.setDatabaseChangeLogLockTableName("liquibasechangeloglock");
if (connection.getMetaData().getDatabaseProductName().contains("HSQL Database Engine")
|| connection.getMetaData().getDatabaseProductName().contains("H2")) {
// a hack because hsqldb and h2 seem to be checking table names in the metadata section case sensitively
database.setDatabaseChangeLogTableName(database.getDatabaseChangeLogTableName().toUpperCase());
database.setDatabaseChangeLogLockTableName(database.getDatabaseChangeLogLockTableName().toUpperCase());
}
ResourceAccessor openmrsFO = new ClassLoaderFileOpener(cl);
ResourceAccessor fsFO = new FileSystemResourceAccessor();
if (changeLogFile == null)
changeLogFile = CHANGE_LOG_FILE;
database.checkDatabaseChangeLogTable(false, null, null);
return new Liquibase(changeLogFile, new CompositeResourceAccessor(openmrsFO, fsFO), database);
}
catch (Exception e) {
// if an error occurs, close the connection
if (connection != null)
connection.close();
throw e;
}
}
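/*
 * Illustrative sketch, not part of the original source, of the
 * acquire/use/close contract stated in the javadoc above: code in this class
 * that obtains a Liquibase object from getLiquibase(..) must close its
 * underlying connection when done. Exception handling is omitted.
 *
 * Liquibase liquibase = getLiquibase(null, null);
 * try {
 *     // ... inspect or run changesets with the liquibase object ...
 * } finally {
 *     liquibase.getDatabase().getConnection().close();
 * }
 */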
/**
* Gets a database connection for liquibase to do the updates
*
* @return a java.sql.connection based on the current runtime properties
*/
public static Connection getConnection() throws Exception {
Properties props = Context.getRuntimeProperties();
mergeDefaultRuntimeProperties(props);
String driver = props.getProperty("hibernate.connection.driver_class");
String username = props.getProperty("hibernate.connection.username");
String password = props.getProperty("hibernate.connection.password");
String url = props.getProperty("hibernate.connection.url");
// hack for mysql to make sure innodb tables are created
if (url.contains("mysql") && !url.contains("InnoDB")) {
url = url + "&sessionVariables=storage_engine=InnoDB";
}
Class.forName(driver);
return DriverManager.getConnection(url, username, password);
}
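/*
 * Example of the hibernate.connection.* runtime properties this method reads;
 * the values below are illustrative assumptions, not taken from any real
 * deployment:
 *
 * hibernate.connection.driver_class=com.mysql.jdbc.Driver
 * hibernate.connection.username=openmrs_user
 * hibernate.connection.password=secret
 * hibernate.connection.url=jdbc:mysql://localhost:3306/openmrs?autoReconnect=true
 */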
/**
* Represents each change in the liquibase-update-to-latest
*/
public static class OpenMRSChangeSet {
private String id;
private String author;
private String comments;
private String description;
private ChangeSet.RunStatus runStatus;
private Date ranDate;
/**
* Create an OpenmrsChangeSet from the given changeset
*
* @param changeSet
* @param database
*/
public OpenMRSChangeSet(ChangeSet changeSet, Database database) throws Exception {
setId(changeSet.getId());
setAuthor(changeSet.getAuthor());
setComments(changeSet.getComments());
setDescription(changeSet.getDescription());
setRunStatus(database.getRunStatus(changeSet));
setRanDate(database.getRanDate(changeSet));
}
/**
* @return the author
*/
public String getAuthor() {
return author;
}
/**
* @param author the author to set
*/
public void setAuthor(String author) {
this.author = author;
}
/**
* @return the comments
*/
public String getComments() {
return comments;
}
/**
* @param comments the comments to set
*/
public void setComments(String comments) {
this.comments = comments;
}
/**
* @return the description
*/
public String getDescription() {
return description;
}
/**
* @param description the description to set
*/
public void setDescription(String description) {
this.description = description;
}
/**
* @return the runStatus
*/
public ChangeSet.RunStatus getRunStatus() {
return runStatus;
}
/**
* @param runStatus the runStatus to set
*/
public void setRunStatus(ChangeSet.RunStatus runStatus) {
this.runStatus = runStatus;
}
/**
* @return the ranDate
*/
public Date getRanDate() {
return ranDate;
}
/**
* @param ranDate the ranDate to set
*/
public void setRanDate(Date ranDate) {
this.ranDate = ranDate;
}
/**
* @return the id
*/
public String getId() {
return id;
}
/**
* @param id the id to set
*/
public void setId(String id) {
this.id = id;
}
}
/**
* Looks at the current liquibase-update-to-latest.xml file and then checks the database to see
* which of its changesets have been run.
*
* @return list of changesets that both have and haven't been run
*/
@Authorized(PrivilegeConstants.VIEW_DATABASE_CHANGES)
public static List<OpenMRSChangeSet> getDatabaseChanges() throws Exception {
Database database = null;
try {
Liquibase liquibase = getLiquibase(CHANGE_LOG_FILE, null);
database = liquibase.getDatabase();
DatabaseChangeLog changeLog = new XMLChangeLogSAXParser().parse(CHANGE_LOG_FILE, new ChangeLogParameters(),
liquibase.getFileOpener());
List<ChangeSet> changeSets = changeLog.getChangeSets();
List<OpenMRSChangeSet> results = new ArrayList<OpenMRSChangeSet>();
for (ChangeSet changeSet : changeSets) {
OpenMRSChangeSet omrschangeset = new OpenMRSChangeSet(changeSet, database);
results.add(omrschangeset);
}
return results;
}
finally {
try {
if (database != null) {
database.getConnection().close();
}
}
catch (Throwable t) {
//pass
}
}
}
/**
* @see DatabaseUpdater#getUnrunDatabaseChanges(String...)
*/
@Authorized(PrivilegeConstants.VIEW_DATABASE_CHANGES)
public static List<OpenMRSChangeSet> getUnrunDatabaseChanges() throws Exception {
return getUnrunDatabaseChanges(CHANGE_LOG_FILE);
}
/**
* Looks at the specified liquibase change log files and returns all changesets in the files
* that have not been run on the database yet. If no argument is specified, then it looks at the
* current liquibase-update-to-latest.xml file
*
* @param changeLogFilenames the filenames of all files to search for unrun changesets
* @return a list of changesets in the given files that have not yet been run on the database
* @throws Exception
*/
@Authorized(PrivilegeConstants.VIEW_DATABASE_CHANGES)
public static List<OpenMRSChangeSet> getUnrunDatabaseChanges(String... changeLogFilenames) throws Exception {
log.debug("Getting unrun changesets");
Database database = null;
try {
if (changeLogFilenames == null)
throw new IllegalArgumentException("changeLogFilenames cannot be null");
//if no argument, look ONLY in liquibase-update-to-latest.xml
if (changeLogFilenames.length == 0)
changeLogFilenames = new String[] { CHANGE_LOG_FILE };
List<OpenMRSChangeSet> results = new ArrayList<OpenMRSChangeSet>();
for (String changelogFile : changeLogFilenames) {
Liquibase liquibase = getLiquibase(changelogFile, null);
database = liquibase.getDatabase();
List<ChangeSet> changeSets = liquibase.listUnrunChangeSets(CONTEXT);
for (ChangeSet changeSet : changeSets) {
OpenMRSChangeSet omrschangeset = new OpenMRSChangeSet(changeSet, database);
results.add(omrschangeset);
}
}
return results;
}
catch (Exception e) {
throw new RuntimeException("Error occurred while trying to get the updates needed for the database. "
+ e.getMessage(), e);
}
finally {
try {
database.getConnection().close();
}
catch (Throwable t) {
//pass
}
}
}
/**
* @return the authenticatedUserId
*/
public static Integer getAuthenticatedUserId() {
return authenticatedUserId;
}
/**
* @param userId the authenticatedUserId to set
*/
public static void setAuthenticatedUserId(Integer userId) {
authenticatedUserId = userId;
}
/**
* This method is called by an executing custom changeset to register warning messages.
*
* @param warnings list of warnings to append to the end of the current list
*/
public static void reportUpdateWarnings(List<String> warnings) {
if (updateWarnings == null)
updateWarnings = new LinkedList<String>();
updateWarnings.addAll(warnings);
}
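/*
 * Illustrative sketch, an assumption rather than original code: a custom
 * liquibase changeset registering a warning so that it is returned from
 * executeChangelog(..) and written to the update log. The message text is
 * hypothetical.
 *
 * List<String> warnings = new LinkedList<String>();
 * warnings.add("Some rows could not be migrated automatically");
 * DatabaseUpdater.reportUpdateWarnings(warnings);
 */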
/**
* This method writes the given text to the database updates log file located in the application
* data directory.
*
* @param text the text to be written to the file
*/
public static void writeUpdateMessagesToFile(String text) {
PrintWriter writer = null;
Scanner scanner = null;
File destFile = new File(OpenmrsUtil.getApplicationDataDirectory(), DatabaseUpdater.DATABASE_UPDATES_LOG_FILE);
try {
String lineSeparator = System.getProperty("line.separator");
Date date = Calendar.getInstance().getTime();
StringBuilder sb = new StringBuilder();
if (destFile.isFile()) {
scanner = new Scanner(destFile);
while (scanner.hasNextLine())
sb.append(scanner.nextLine()).append(lineSeparator);
sb.append(lineSeparator);
sb.append(lineSeparator);
if (scanner.ioException() != null)
log.warn("Some error(s) occurred while reading messages from the database update log file", scanner
.ioException());
}
writer = new PrintWriter(destFile);
sb.append("********** START OF DATABASE UPDATE LOGS AS AT " + date + " **********");
sb.append(lineSeparator);
sb.append(lineSeparator);
sb.append(text);
sb.append(lineSeparator);
sb.append(lineSeparator);
sb.append("*********** END OF DATABASE UPDATE LOGS AS AT " + date + " ***********");
writer.write(sb.toString());
//check if there was an error while writing to the file
if (writer.checkError())
log.warn("An error occurred while writing warnings to the database update log file");
}
catch (FileNotFoundException e) {
log.warn("Generated error", e);
}
finally {
if (writer != null)
writer.close();
if (scanner != null)
scanner.close();
}
}
/**
* This method releases the liquibase db lock after a crashed database update. First, it
* checks whether "liquibasechangeloglock" table exists in db. If so, it will check
* whether the database is locked. If that's also true, this means that the last attempted db
* update crashed.<br/>
* <br/>
* This should only be called if the user is sure that no one else is currently running
* database updates. This method should be used if there was a db crash while updates
* were being written and the lock table was never cleaned up.
*
* @throws LockException
*/
public static synchronized void releaseDatabaseLock() throws LockException {
Database database = null;
try {
Liquibase liquibase = getLiquibase(null, null);
database = liquibase.getDatabase();
if (database.hasDatabaseChangeLogLockTable()) {
if (isLocked()) {
LockService.getInstance(database).forceReleaseLock();
}
}
}
catch (Exception e) {
throw new LockException(e);
}
finally {
try {
database.getConnection().close();
}
catch (Throwable t) {
// pass
}
}
}
/**
* This method currently checks the liquibasechangeloglock table to see if there is a row
* with a lock in it. This uses the liquibase API to do this
*
* @return true if database is currently locked
*/
public static boolean isLocked() {
Database database = null;
try {
Liquibase liquibase = getLiquibase(null, null);
database = liquibase.getDatabase();
return LockService.getInstance(database).listLocks().length > 0;
}
catch (Exception e) {
return false;
}
finally {
try {
database.getConnection().close();
}
catch (Throwable t) {
// pass
}
}
}
}
| api/src/main/java/org/openmrs/util/DatabaseUpdater.java | /**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Scanner;
import java.util.Set;
import liquibase.Liquibase;
import liquibase.changelog.ChangeLogIterator;
import liquibase.changelog.ChangeLogParameters;
import liquibase.changelog.ChangeSet;
import liquibase.changelog.DatabaseChangeLog;
import liquibase.changelog.filter.ContextChangeSetFilter;
import liquibase.changelog.filter.DbmsChangeSetFilter;
import liquibase.changelog.filter.ShouldRunChangeSetFilter;
import liquibase.changelog.visitor.UpdateVisitor;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.LiquibaseException;
import liquibase.exception.LockException;
import liquibase.lockservice.LockService;
import liquibase.parser.core.xml.XMLChangeLogSAXParser;
import liquibase.resource.CompositeResourceAccessor;
import liquibase.resource.FileSystemResourceAccessor;
import liquibase.resource.ResourceAccessor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openmrs.annotation.Authorized;
import org.openmrs.api.context.Context;
/**
* This class uses Liquibase to update the database. <br/>
* <br/>
* See /metadata/model/liquibase-update-to-latest.xml for the changes. This class will also run
* arbitrary liquibase xml files on the associated database. Details for the database are
* taken from the openmrs runtime properties.
*
* @since 1.5
*/
public class DatabaseUpdater {
private static Log log = LogFactory.getLog(DatabaseUpdater.class);
private static final String CHANGE_LOG_FILE = "liquibase-update-to-latest.xml";
private static final String CONTEXT = "core";
public static final String DATABASE_UPDATES_LOG_FILE = "liquibaseUpdateLogs.txt";
private static Integer authenticatedUserId;
/**
* Holds the update warnings generated by the custom liquibase changesets as they are executed
*/
private static List<String> updateWarnings = null;
/**
* Convenience method to run the changesets using Liquibase to bring the database up to a
* version compatible with the code
*
* @throws InputRequiredException if the changelog file requires some sort of user input. The
* error object will list the user prompts and the type of data for each prompt
* @see #update(Map)
* @see #executeChangelog(String, Map)
*/
public static void executeChangelog() throws DatabaseUpdateException, InputRequiredException {
executeChangelog(null, null);
}
/**
* Run changesets on database using Liquibase to get the database up to the most recent version
*
* @param changelog the liquibase changelog file to use (or null to use the default file)
* @param userInput nullable map from question to user answer. Used if a call to update(null)
* threw an {@link InputRequiredException}
* @throws DatabaseUpdateException
* @throws InputRequiredException
*/
public static void executeChangelog(String changelog, Map<String, Object> userInput) throws DatabaseUpdateException,
InputRequiredException {
log.debug("Executing changelog: " + changelog);
executeChangelog(changelog, userInput, null);
}
/**
* Interface used for callbacks when updating the database. Implement this interface and pass it
* to {@link DatabaseUpdater#executeChangelog(String, Map, ChangeSetExecutorCallback)}
*/
public interface ChangeSetExecutorCallback {
/**
* This method is called after each changeset is executed.
*
* @param changeSet the liquibase changeset that was just run
* @param numChangeSetsToRun the total number of changesets in the current file
*/
public void executing(ChangeSet changeSet, int numChangeSetsToRun);
}
/**
* Executes the given changelog file. This file is assumed to be on the classpath. If no file is
* given, the default {@link #CHANGE_LOG_FILE} is run.
*
* @param changelog The string filename of a liquibase changelog xml file to run
* @param userInput nullable map from question to user answer. Used if a call to
* executeChangelog(<String>, null) threw an {@link InputRequiredException}
* @return A list of messages or warnings generated by the executed changesets
* @throws InputRequiredException if the changelog file requires some sort of user input. The
* error object will list the user prompts and the type of data for each prompt
*/
public static List<String> executeChangelog(String changelog, Map<String, Object> userInput,
ChangeSetExecutorCallback callback) throws DatabaseUpdateException, InputRequiredException {
log.debug("installing the tables into the database");
if (changelog == null)
changelog = CHANGE_LOG_FILE;
try {
return executeChangelog(changelog, CONTEXT, userInput, callback);
}
catch (Exception e) {
throw new DatabaseUpdateException("There was an error while updating the database to the latest. file: "
+ changelog + ". Error: " + e.getMessage(), e);
}
}
/**
* @deprecated use
* {@link #executeChangelog(String, String, Map, ChangeSetExecutorCallback, ClassLoader)}
*/
@Deprecated
public static List<String> executeChangelog(String changeLogFile, String contexts, Map<String, Object> userInput,
ChangeSetExecutorCallback callback) throws Exception {
return executeChangelog(changeLogFile, contexts, userInput, callback, null);
}
/**
* This code was borrowed from the liquibase jar so that we can call the given callback
* function.
*
* @param changeLogFile the file to execute
* @param contexts the liquibase changeset context
* @param userInput answers given by the user
* @param callback the function to call after every changeset
* @param cl {@link ClassLoader} to use to find the changeLogFile (or null to use
* {@link OpenmrsClassLoader})
* @return A list of messages or warnings generated by the executed changesets
* @throws Exception
*/
public static List<String> executeChangelog(String changeLogFile, String contexts, Map<String, Object> userInput,
ChangeSetExecutorCallback callback, ClassLoader cl) throws Exception {
final class OpenmrsUpdateVisitor extends UpdateVisitor {
private ChangeSetExecutorCallback callback;
private int numChangeSetsToRun;
public OpenmrsUpdateVisitor(Database database, ChangeSetExecutorCallback callback, int numChangeSetsToRun) {
super(database);
this.callback = callback;
this.numChangeSetsToRun = numChangeSetsToRun;
}
@Override
public void visit(ChangeSet changeSet, DatabaseChangeLog databaseChangeLog, Database database)
throws LiquibaseException {
if (callback != null)
callback.executing(changeSet, numChangeSetsToRun);
super.visit(changeSet, databaseChangeLog, database);
}
}
if (cl == null)
cl = OpenmrsClassLoader.getInstance();
log.debug("Setting up liquibase object to run changelog: " + changeLogFile);
Liquibase liquibase = getLiquibase(changeLogFile, cl);
int numChangeSetsToRun = liquibase.listUnrunChangeSets(contexts).size();
Database database = liquibase.getDatabase();
LockService lockHandler = LockService.getInstance(database);
lockHandler.waitForLock();
try {
ResourceAccessor openmrsFO = new ClassLoaderFileOpener(cl);
ResourceAccessor fsFO = new FileSystemResourceAccessor();
DatabaseChangeLog changeLog = new XMLChangeLogSAXParser().parse(changeLogFile, new ChangeLogParameters(),
new CompositeResourceAccessor(openmrsFO, fsFO));
changeLog.validate(database);
ChangeLogIterator logIterator = new ChangeLogIterator(changeLog, new ShouldRunChangeSetFilter(database),
new ContextChangeSetFilter(contexts), new DbmsChangeSetFilter(database));
database.checkDatabaseChangeLogTable(true, changeLog, new String[] { contexts });
logIterator.run(new OpenmrsUpdateVisitor(database, callback, numChangeSetsToRun), database);
}
catch (LiquibaseException e) {
throw e;
}
finally {
try {
lockHandler.releaseLock();
}
catch (LockException e) {
log.error("Could not release lock", e);
}
try {
database.getConnection().close();
}
catch (Throwable t) {
//pass
}
}
return updateWarnings;
}
/**
* Ask Liquibase if it needs to do any updates. Only looks at the {@link #CHANGE_LOG_FILE}
*
* @return true/false whether database updates are required
* @should always have a valid update to latest file
*/
public static boolean updatesRequired() throws Exception {
log.debug("checking for updates");
List<OpenMRSChangeSet> changesets = getUnrunDatabaseChanges();
Database database = null;
try {
Liquibase liquibase = getLiquibase(null, null);
database = liquibase.getDatabase();
// if the db is locked, it means there was a crash
// or someone is executing db updates right now. either way
// returning true here stops the openmrs startup and shows
// the user the maintenance wizard for updates
if (isLocked()) {
// if there is a db lock but there are no db changes we undo the
// lock
if (changesets.size() == 0) {
DatabaseUpdater.releaseDatabaseLock();
return false;
} else {
return true;
}
}
}
catch (Exception e) {
// do nothing
}
finally {
try {
database.getConnection().close();
}
catch (Throwable t) {
// pass
}
}
return changesets.size() > 0;
}
/**
* Ask Liquibase if it needs to do any updates
*
* @param changeLogFilenames the filenames of all files to search for unrun changesets
* @return true/false whether database updates are required
* @should always have a valid update to latest file
*/
public static boolean updatesRequired(String... changeLogFilenames) throws Exception {
log.debug("checking for updates");
List<OpenMRSChangeSet> changesets = getUnrunDatabaseChanges(changeLogFilenames);
return changesets.size() > 0;
}
/**
* Indicates whether automatic database updates are allowed by this server. Automatic updates
* are disabled by default. In order to enable automatic updates, the admin needs to add
* 'auto_update_database=true' to the runtime properties file.
*
* @return true/false whether the 'auto_update_database' has been enabled.
*/
public static Boolean allowAutoUpdate() {
String allowAutoUpdate = Context.getRuntimeProperties().getProperty(
OpenmrsConstants.AUTO_UPDATE_DATABASE_RUNTIME_PROPERTY, "false");
return "true".equals(allowAutoUpdate);
}
/**
* Takes the default properties defined in /metadata/api/hibernate/hibernate.default.properties
* and merges them into the user-defined runtime properties
*
* @see org.openmrs.api.db.ContextDAO#mergeDefaultRuntimeProperties(java.util.Properties)
*/
private static void mergeDefaultRuntimeProperties(Properties runtimeProperties) {
// loop over runtime properties and precede each with "hibernate" if
// it isn't already
Set<Object> runtimePropertyKeys = new HashSet<Object>();
runtimePropertyKeys.addAll(runtimeProperties.keySet()); // must do it this way to prevent concurrent mod errors
for (Object key : runtimePropertyKeys) {
String prop = (String) key;
String value = (String) runtimeProperties.get(key);
log.trace("Setting property: " + prop + ":" + value);
if (!prop.startsWith("hibernate") && !runtimeProperties.containsKey("hibernate." + prop))
runtimeProperties.setProperty("hibernate." + prop, value);
}
// load in the default hibernate properties from hibernate.default.properties
InputStream propertyStream = null;
try {
Properties props = new Properties();
// TODO: This is a dumb requirement to have hibernate in here. Clean this up
propertyStream = DatabaseUpdater.class.getClassLoader().getResourceAsStream("hibernate.default.properties");
OpenmrsUtil.loadProperties(props, propertyStream);
// add in all default properties that don't exist in the runtime
// properties yet
for (Map.Entry<Object, Object> entry : props.entrySet()) {
if (!runtimeProperties.containsKey(entry.getKey()))
runtimeProperties.put(entry.getKey(), entry.getValue());
}
}
finally {
try {
propertyStream.close();
}
catch (Throwable t) {
// pass
}
}
}
/**
* Get a connection to the database through Liquibase. The calling method /must/ close the
* database connection when finished with this Liquibase object.
* liquibase.getDatabase().getConnection().close()
*
* @param changeLogFile the name of the file to look for on the classpath or filesystem
* @param cl the {@link ClassLoader} to use to find the file (or null to use
* {@link OpenmrsClassLoader})
* @return Liquibase object based on the current connection settings
* @throws Exception
*/
private static Liquibase getLiquibase(String changeLogFile, ClassLoader cl) throws Exception {
Connection connection = null;
try {
connection = getConnection();
}
catch (SQLException e) {
throw new Exception(
"Unable to get a connection to the database. Please check your openmrs runtime properties file and make sure you have the correct connection.username and connection.password set",
e);
}
if (cl == null)
cl = OpenmrsClassLoader.getInstance();
try {
Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(
new JdbcConnection(connection));
database.setDatabaseChangeLogTableName("liquibasechangelog");
database.setDatabaseChangeLogLockTableName("liquibasechangeloglock");
if (connection.getMetaData().getDatabaseProductName().contains("HSQL Database Engine")
|| connection.getMetaData().getDatabaseProductName().contains("H2")) {
// a hack because hsqldb and h2 seem to be checking table names in the metadata section case sensitively
database.setDatabaseChangeLogTableName(database.getDatabaseChangeLogTableName().toUpperCase());
database.setDatabaseChangeLogLockTableName(database.getDatabaseChangeLogLockTableName().toUpperCase());
}
ResourceAccessor openmrsFO = new ClassLoaderFileOpener(cl);
ResourceAccessor fsFO = new FileSystemResourceAccessor();
if (changeLogFile == null)
changeLogFile = CHANGE_LOG_FILE;
database.checkDatabaseChangeLogTable(false, null, null);
return new Liquibase(changeLogFile, new CompositeResourceAccessor(openmrsFO, fsFO), database);
}
catch (Exception e) {
// if an error occurs, close the connection
if (connection != null)
connection.close();
throw e;
}
}
/**
* Gets a database connection for liquibase to do the updates
*
* @return a java.sql.connection based on the current runtime properties
*/
public static Connection getConnection() throws Exception {
Properties props = Context.getRuntimeProperties();
mergeDefaultRuntimeProperties(props);
String driver = props.getProperty("hibernate.connection.driver_class");
String username = props.getProperty("hibernate.connection.username");
String password = props.getProperty("hibernate.connection.password");
String url = props.getProperty("hibernate.connection.url");
// hack for mysql to make sure innodb tables are created
if (url.contains("mysql") && !url.contains("InnoDB")) {
url = url + "&sessionVariables=storage_engine=InnoDB";
}
Class.forName(driver);
return DriverManager.getConnection(url, username, password);
}
/**
* Represents each change in the liquibase-update-to-latest
*/
public static class OpenMRSChangeSet {
private String id;
private String author;
private String comments;
private String description;
private ChangeSet.RunStatus runStatus;
private Date ranDate;
/**
* Create an OpenmrsChangeSet from the given changeset
*
* @param changeSet
* @param database
*/
public OpenMRSChangeSet(ChangeSet changeSet, Database database) throws Exception {
setId(changeSet.getId());
setAuthor(changeSet.getAuthor());
setComments(changeSet.getComments());
setDescription(changeSet.getDescription());
setRunStatus(database.getRunStatus(changeSet));
setRanDate(database.getRanDate(changeSet));
}
/**
* @return the author
*/
public String getAuthor() {
return author;
}
/**
* @param author the author to set
*/
public void setAuthor(String author) {
this.author = author;
}
/**
* @return the comments
*/
public String getComments() {
return comments;
}
/**
* @param comments the comments to set
*/
public void setComments(String comments) {
this.comments = comments;
}
/**
* @return the description
*/
public String getDescription() {
return description;
}
/**
* @param description the description to set
*/
public void setDescription(String description) {
this.description = description;
}
/**
* @return the runStatus
*/
public ChangeSet.RunStatus getRunStatus() {
return runStatus;
}
/**
* @param runStatus the runStatus to set
*/
public void setRunStatus(ChangeSet.RunStatus runStatus) {
this.runStatus = runStatus;
}
/**
* @return the ranDate
*/
public Date getRanDate() {
return ranDate;
}
/**
* @param ranDate the ranDate to set
*/
public void setRanDate(Date ranDate) {
this.ranDate = ranDate;
}
/**
* @return the id
*/
public String getId() {
return id;
}
/**
* @param id the id to set
*/
public void setId(String id) {
this.id = id;
}
}
/**
* Looks at the current liquibase-update-to-latest.xml file and then checks the database to see
* which of its changesets have been run.
*
* @return list of changesets that both have and haven't been run
*/
@Authorized(PrivilegeConstants.VIEW_DATABASE_CHANGES)
public static List<OpenMRSChangeSet> getDatabaseChanges() throws Exception {
Database database = null;
try {
Liquibase liquibase = getLiquibase(CHANGE_LOG_FILE, null);
database = liquibase.getDatabase();
DatabaseChangeLog changeLog = new XMLChangeLogSAXParser().parse(CHANGE_LOG_FILE, new ChangeLogParameters(),
liquibase.getFileOpener());
List<ChangeSet> changeSets = changeLog.getChangeSets();
List<OpenMRSChangeSet> results = new ArrayList<OpenMRSChangeSet>();
for (ChangeSet changeSet : changeSets) {
OpenMRSChangeSet omrschangeset = new OpenMRSChangeSet(changeSet, database);
results.add(omrschangeset);
}
return results;
}
finally {
try {
if (database != null) {
database.getConnection().close();
}
}
catch (Throwable t) {
//pass
}
}
}
/**
* @see DatabaseUpdater#getUnrunDatabaseChanges(String...)
*/
@Authorized(PrivilegeConstants.VIEW_DATABASE_CHANGES)
public static List<OpenMRSChangeSet> getUnrunDatabaseChanges() throws Exception {
return getUnrunDatabaseChanges(CHANGE_LOG_FILE);
}
/**
* Looks at the specified liquibase change log files and returns all changesets in the files
* that have not been run on the database yet. If no argument is specified, then it looks at the
* current liquibase-update-to-latest.xml file
*
* @param changeLogFilenames the filenames of all files to search for unrun changesets
* @return a list of changesets in the given files that have not yet been run on the database
* @throws Exception
*/
@Authorized(PrivilegeConstants.VIEW_DATABASE_CHANGES)
public static List<OpenMRSChangeSet> getUnrunDatabaseChanges(String... changeLogFilenames) throws Exception {
log.debug("Getting unrun changesets");
Database database = null;
try {
if (changeLogFilenames == null)
throw new IllegalArgumentException("changeLogFilenames cannot be null");
//if no argument, look ONLY in liquibase-update-to-latest.xml
if (changeLogFilenames.length == 0)
changeLogFilenames = new String[] { CHANGE_LOG_FILE };
List<OpenMRSChangeSet> results = new ArrayList<OpenMRSChangeSet>();
for (String changelogFile : changeLogFilenames) {
Liquibase liquibase = getLiquibase(changelogFile, null);
database = liquibase.getDatabase();
List<ChangeSet> changeSets = liquibase.listUnrunChangeSets(CONTEXT);
for (ChangeSet changeSet : changeSets) {
OpenMRSChangeSet omrschangeset = new OpenMRSChangeSet(changeSet, database);
results.add(omrschangeset);
}
}
return results;
}
catch (Exception e) {
throw new RuntimeException("Error occurred while trying to get the updates needed for the database. "
+ e.getMessage(), e);
}
finally {
try {
database.getConnection().close();
}
catch (Throwable t) {
//pass
}
}
}
/**
* @return the authenticatedUserId
*/
public static Integer getAuthenticatedUserId() {
return authenticatedUserId;
}
/**
* @param userId the authenticatedUserId to set
*/
public static void setAuthenticatedUserId(Integer userId) {
authenticatedUserId = userId;
}
/**
* This method is called by an executing custom changeset to register warning messages.
*
* @param warnings list of warnings to append to the end of the current list
*/
public static void reportUpdateWarnings(List<String> warnings) {
if (updateWarnings == null)
updateWarnings = new LinkedList<String>();
updateWarnings.addAll(warnings);
}
/**
* This method writes the given text to the database updates log file located in the application
* data directory.
*
* @param text the text to be written to the file
*/
public static void writeUpdateMessagesToFile(String text) {
PrintWriter writer = null;
Scanner scanner = null;
File destFile = new File(OpenmrsUtil.getApplicationDataDirectory(), DatabaseUpdater.DATABASE_UPDATES_LOG_FILE);
try {
String lineSeparator = System.getProperty("line.separator");
Date date = Calendar.getInstance().getTime();
StringBuilder sb = new StringBuilder();
if (destFile.isFile()) {
scanner = new Scanner(destFile);
while (scanner.hasNextLine())
sb.append(scanner.nextLine()).append(lineSeparator);
sb.append(lineSeparator);
sb.append(lineSeparator);
if (scanner.ioException() != null)
log.warn("Some error(s) occurred while reading messages from the database update log file", scanner
.ioException());
}
writer = new PrintWriter(destFile);
sb.append("********** START OF DATABASE UPDATE LOGS AS AT " + date + " **********");
sb.append(lineSeparator);
sb.append(lineSeparator);
sb.append(text);
sb.append(lineSeparator);
sb.append(lineSeparator);
sb.append("*********** END OF DATABASE UPDATE LOGS AS AT " + date + " ***********");
writer.write(sb.toString());
//check if there was an error while writing to the file
if (writer.checkError())
log.warn("An error occurred while writing warnings to the database update log file");
}
catch (FileNotFoundException e) {
log.warn("Generated error", e);
}
finally {
if (writer != null)
writer.close();
if (scanner != null)
scanner.close();
}
}
/**
* This method releases the liquibase db lock after a crashed database update. First, it
* checks whether "liquibasechangeloglock" table exists in db. If so, it will check
* whether the database is locked. If that's also true, this means that the last attempted db
* update crashed.<br/>
* <br/>
* This should only be called if the user is sure that no one else is currently running
* database updates. This method should be used if there was a db crash while updates
* were being written and the lock table was never cleaned up.
*
* @throws LockException
*/
public static synchronized void releaseDatabaseLock() throws LockException {
Database database = null;
try {
Liquibase liquibase = getLiquibase(null, null);
database = liquibase.getDatabase();
if (database.hasDatabaseChangeLogLockTable()) {
if (isLocked()) {
LockService.getInstance(database).forceReleaseLock();
}
}
}
catch (Exception e) {
throw new LockException(e);
}
}
/**
* This method currently checks the liquibasechangeloglock table to see if there is a row
* with a lock in it. This uses the liquibase API to do this
*
* @return true if database is currently locked
*/
public static boolean isLocked() {
Database database = null;
try {
Liquibase liquibase = getLiquibase(null, null);
database = liquibase.getDatabase();
Boolean locked = LockService.getInstance(database).listLocks().length > 0;
// if there is a db lock but there are no db changes we undo the lock
if (locked && DatabaseUpdater.getUnrunDatabaseChanges().size() == 0) {
DatabaseUpdater.releaseDatabaseLock();
locked = Boolean.FALSE;
}
return locked;
}
catch (Exception e) {
return false;
}
}
}
| Followup fix from review for Clarify what the startup error "Waiting for changelog lock...." means - TRUNK-1637
git-svn-id: ce3478dfdc990238714fcdf4fc6855b7489218cf@24961 5bac5841-c719-aa4e-b3fe-cce5062f897a
| api/src/main/java/org/openmrs/util/DatabaseUpdater.java | Followup fix from review for Clarify what the startup error "Waiting for changelog lock...." means - TRUNK-1637 | <ide><path>pi/src/main/java/org/openmrs/util/DatabaseUpdater.java
<ide> log.debug("Setting up liquibase object to run changelog: " + changeLogFile);
<ide> Liquibase liquibase = getLiquibase(changeLogFile, cl);
<ide> int numChangeSetsToRun = liquibase.listUnrunChangeSets(contexts).size();
<del> Database database = liquibase.getDatabase();
<del>
<del> LockService lockHandler = LockService.getInstance(database);
<del> lockHandler.waitForLock();
<del>
<del> try {
<add> Database database = null;
<add> LockService lockHandler = null;
<add>
<add> try {
<add> database = liquibase.getDatabase();
<add> lockHandler = LockService.getInstance(database);
<add> lockHandler.waitForLock();
<add>
<ide> ResourceAccessor openmrsFO = new ClassLoaderFileOpener(cl);
<ide> ResourceAccessor fsFO = new FileSystemResourceAccessor();
<ide>
<ide> try {
<ide> lockHandler.releaseLock();
<ide> }
<del> catch (LockException e) {
<del> log.error("Could not release lock", e);
<add> catch (Throwable t) {
<add> log.error("Could not release lock", t);
<ide> }
<ide> try {
<ide> database.getConnection().close();
<ide> public static boolean updatesRequired() throws Exception {
<ide> log.debug("checking for updates");
<ide> List<OpenMRSChangeSet> changesets = getUnrunDatabaseChanges();
<del> Database database = null;
<del> try {
<del>
<del> Liquibase liquibase = getLiquibase(null, null);
<del> database = liquibase.getDatabase();
<del>
<del> // if the db is locked, it means there was a crash
<del> // or someone is executing db updates right now. either way
<del> // returning true here stops the openmrs startup and shows
<del> // the user the maintenance wizard for updates
<del> if (isLocked()) {
<del> // if there is a db lock but there are no db changes we undo the
<del> // lock
<del> if (changesets.size() == 0) {
<del> DatabaseUpdater.releaseDatabaseLock();
<del> return false;
<del> } else {
<del> return true;
<del> }
<del> }
<del> }
<del> catch (Exception e) {
<del> // do nothing
<del> }
<del> finally {
<del> try {
<del> database.getConnection().close();
<del> }
<del> catch (Throwable t) {
<del> // pass
<del> }
<del> }
<add>
<add> // if the db is locked, it means there was a crash
<add> // or someone is executing db updates right now. either way
<add> // returning true here stops the openmrs startup and shows
<add> // the user the maintenance wizard for updates
<add> if (isLocked() && changesets.size() == 0) {
<add> // if there is a db lock but there are no db changes we undo the
<add> // lock
<add> DatabaseUpdater.releaseDatabaseLock();
<add> log.debug("db lock found and released automatically");
<add> return false;
<add> }
<add>
<ide> return changesets.size() > 0;
<ide> }
<ide>
<ide> catch (Exception e) {
<ide> throw new LockException(e);
<ide> }
<add> finally {
<add> try {
<add> database.getConnection().close();
<add> }
<add> catch (Throwable t) {
<add> // pass
<add> }
<add> }
<ide> }
<ide>
<ide> /**
<ide> try {
<ide> Liquibase liquibase = getLiquibase(null, null);
<ide> database = liquibase.getDatabase();
<del> Boolean locked = LockService.getInstance(database).listLocks().length > 0;
<del> // if there is a db lock but there are no db changes we undo the lock
<del> if (locked && DatabaseUpdater.getUnrunDatabaseChanges().size() == 0) {
<del> DatabaseUpdater.releaseDatabaseLock();
<del> locked = Boolean.FALSE;
<del> }
<del> return locked;
<add> return LockService.getInstance(database).listLocks().length > 0;
<ide> }
<ide> catch (Exception e) {
<ide> return false;
<ide> }
<add> finally {
<add> try {
<add> database.getConnection().close();
<add> }
<add> catch (Throwable t) {
<add> // pass
<add> }
<add> }
<ide> }
<ide> } |
|
Java | epl-1.0 | 3061df6d43c4f6c8817fd8a6519693c7b9187225 | 0 | jtrfp/terminal-recall,jtrfp/terminal-recall,jtrfp/terminal-recall | src/test/java/org/jtrfp/trcl/pool/PartitionedIndexPoolTest.java | /*******************************************************************************
* This file is part of TERMINAL RECALL
* Copyright (c) 2012-2015 Chuck Ritola
* Part of the jTRFP.org project
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/gpl.html
*
* Contributors:
* chuck - initial API and implementation
******************************************************************************/
package org.jtrfp.trcl.pool;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Collection;
import org.jtrfp.trcl.pool.PartitionedIndexPool.Entry;
import org.jtrfp.trcl.pool.PartitionedIndexPool.Partition;
import org.jtrfp.trcl.pool.PartitionedIndexPool.UnusedIndexLimitBehavior;
import com.ochafik.util.listenable.CollectionEvent;
import com.ochafik.util.listenable.CollectionListener;
import com.ochafik.util.listenable.ListenableCollection;
import com.ochafik.util.listenable.ListenableList;
public abstract class PartitionedIndexPoolTest {
protected PartitionedIndexPool<TestObject> subject;
protected class TestObject{
}//end TestObject
protected abstract PartitionedIndexPool<TestObject> newSubject();
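/*
 * Illustrative sketch, not part of the original source: a concrete test
 * subclass supplies the implementation under test by overriding newSubject().
 * Both the subclass name and the pool implementation named here are
 * hypothetical; the subclass (or a setUp method) is assumed to assign the
 * returned pool to the protected field 'subject' before the tests run.
 *
 * public class DefaultPartitionedIndexPoolTest extends PartitionedIndexPoolTest {
 *     @Override
 *     protected PartitionedIndexPool<TestObject> newSubject() {
 *         return new DefaultPartitionedIndexPool<TestObject>();
 *     }
 * }
 */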
public void testNewPartition() {
Partition<TestObject> tp = subject.newPartition();
assertNotNull(tp);
}
public void testRemovePartition() {
Partition<TestObject> tp = subject.newPartition();
assertTrue(subject.getPartitions().contains(tp));
assertTrue(tp.isValid());
assertEquals(subject,subject.removePartition(tp));
assertFalse(subject.getPartitions().contains(tp));
assertFalse(tp.isValid());
}
public void testRemoveAllPartitions() {
for(int i=0; i<10; i++)
subject.newPartition();
assertEquals(10, subject.getPartitions().size());
Collection<Partition<TestObject>> parts = new ArrayList<Partition<TestObject>>();
parts.addAll(subject.getPartitions());
assertEquals(subject,subject.removeAllPartitions());
assertEquals(0, subject.getPartitions().size());
for(Partition<TestObject> p:parts)
assertFalse(p.isValid());
}
public void testGetPartitions() {
for(int i=0; i<10; i++)
subject.newPartition();
assertEquals(10,subject.getPartitions().size());
Partition<TestObject> np = subject.newPartition();
ListenableCollection<Partition<TestObject>> partitions;
partitions = subject.getPartitions();
assertNotNull(partitions);
assertTrue((partitions).contains(np));
final int [] added = new int[]{0};
final int [] removed = new int[]{0};
partitions.addCollectionListener(new CollectionListener<Partition<TestObject>>(){
@Override
public void collectionChanged(
CollectionEvent<Partition<TestObject>> evt) {
switch(evt.getType()){
case ADDED:
added[0]++;
break;
case REMOVED:
removed[0]++;
break;
case UPDATED:
fail("Items should not update.");
break;
default:
fail("Unsupported enum: "+evt.getType());
break;
}
}});
assertEquals(0, added[0]);
assertEquals(0, removed[0]);
Partition<TestObject> np1 = subject.newPartition();
assertEquals(1, added[0]);
assertEquals(0, removed[0]);
subject.removePartition(np1);
assertEquals(1, added[0]);
assertEquals(1, removed[0]);
subject.removeAllPartitions();
assertEquals(1, added[0]);
assertEquals(12, removed[0]);
assertTrue(partitions.isEmpty());
}//end testGetPartitions()
public void testGetFlatEntries() {
final ListenableList<Entry<TestObject>> flatEntries = subject.getFlatEntries();
assertNotNull(flatEntries);
final Partition<TestObject> [] parts = new Partition[2];
final TestObject [] to = new TestObject [] {new TestObject(),new TestObject()};
final Entry<TestObject> [] entries = new Entry[4];
final int [] added = new int[1];
final int [] removed = new int[1];
flatEntries.addCollectionListener(new CollectionListener<Entry<TestObject>>(){
@Override
public void collectionChanged(CollectionEvent<Entry<TestObject>> evt) {
switch(evt.getType()){
case ADDED:
added[0]++;
break;
case REMOVED:
removed[0]++;
break;
case UPDATED:
fail("Unsupported enum: "+evt.getType());
break;
default:
break;
}//end EventType
}});
parts[0] = subject.newPartition();
parts[1] = subject.newPartition();
entries[0] = parts[0].newEntry(to[0]);
entries[1] = parts[1].newEntry(to[1]);
assertEquals(2,flatEntries.size());
assertTrue(flatEntries.contains(entries[0]));
assertTrue(flatEntries.contains(entries[1]));
}
public void testGetTotalUnusedIndices() {
Partition<TestObject> p0 = subject.newPartition();
assertEquals(0, subject.getTotalUnusedIndices());
Partition<TestObject> p1 = subject.newPartition();
assertEquals(0, subject.getTotalUnusedIndices());
Entry<TestObject> [] to = new Entry[2];
p0.newEntry(new TestObject());
assertEquals(0, subject.getTotalUnusedIndices());
p0.newEntry(new TestObject());
assertEquals(0, subject.getTotalUnusedIndices());
to[0] = p0.newEntry(new TestObject());
p1.newEntry(new TestObject());
assertEquals(0, subject.getTotalUnusedIndices());
p1.newEntry(new TestObject());
assertEquals(0, subject.getTotalUnusedIndices());
to[1] = p1.newEntry(new TestObject());
assertEquals(0, subject.getTotalUnusedIndices());
to[0].remove();
assertEquals(1, subject.getTotalUnusedIndices());
to[1].remove();
assertEquals(2, subject.getTotalUnusedIndices());
        to[0] = p0.newEntry(new TestObject());
assertEquals(1, subject.getTotalUnusedIndices());
subject.defragment(1);
assertEquals(1, subject.getTotalUnusedIndices());
subject.defragment(0);
assertEquals(0, subject.getTotalUnusedIndices());
}
public void testDefragment() {
Partition<TestObject> p0 = subject.newPartition();
Partition<TestObject> p1 = subject.newPartition();
Entry<TestObject> [] to = new Entry[2];
p0.newEntry(new TestObject());
p0.newEntry(new TestObject());
to[0] = p0.newEntry(new TestObject());
p1.newEntry(new TestObject());
p1.newEntry(new TestObject());
to[1] = p1.newEntry(new TestObject());
to[0].remove();
to[1].remove();
int unused = 0;
for(Entry<TestObject> obj:subject.getFlatEntries())
if(obj==null)unused++;
assertEquals(2,unused);
subject.defragment(1);
unused = 0;
for(Entry<TestObject> obj:subject.getFlatEntries())
if(obj==null)unused++;
assertEquals(1,unused);
assertEquals(subject, subject.defragment(0));
assertEquals(0, subject.getTotalUnusedIndices());
unused = 0;
for(Entry<TestObject> obj:subject.getFlatEntries())
if(obj==null)unused++;
assertEquals(0,unused);
}
public void testSetTotalUnusedLimitBehavior() {
final int [] defragInvokes = new int[1];
assertEquals(subject,subject.setTotalUnusedLimitBehavior(new UnusedIndexLimitBehavior(){
@Override
public void proposeDefragmentation(
PartitionedIndexPool<?> poolToCheck)
throws NullPointerException {
                defragInvokes[0]++;
}}));
final Partition<TestObject> p = subject.newPartition();
Entry<TestObject> ent;
p.newEntry(new TestObject());
ent = p.newEntry(new TestObject());
assertEquals(0, defragInvokes[0]);
ent.remove();
assertEquals(1, defragInvokes[0]);
}
public void testGetTotalUnusedLimitBehavior() {
UnusedIndexLimitBehavior uilb;
subject.setTotalUnusedLimitBehavior(uilb = new UnusedIndexLimitBehavior(){
@Override
public void proposeDefragmentation(
PartitionedIndexPool<?> poolToCheck)
throws NullPointerException {
}});
assertEquals(uilb, subject.getTotalUnusedLimitBehavior());
}
/////////// PARTITION //////////////
public void testPartitionGetParent(){
assertEquals(subject,subject.newPartition().getParent());
}
public void testPartitionNewEntry(){
Partition<TestObject> p;
Entry<TestObject> entry = (p=subject.newPartition()).newEntry(new TestObject());
assertNotNull(entry);
assertTrue(p.getEntries().contains(entry));
}
public void testPartitionRemove(){
Partition<TestObject> p;
p = subject.newPartition();
assertTrue(p.isValid());
assertEquals(p,p.remove());
assertFalse(p.isValid());
assertFalse(subject.getPartitions().contains(p));
}
public void testPartitionRemoveEntry(){
Partition<TestObject> p;
p = subject.newPartition();
Entry<TestObject> entry = p.newEntry(new TestObject());
assertTrue(p.getEntries().contains(entry));
assertEquals(entry,p.removeEntry(entry));
assertFalse(p.getEntries().contains(entry));
}
public void testPartitionRemoveAllEntries(){
Partition<TestObject> p;
p = subject.newPartition();
Entry<TestObject> entry0 = p.newEntry(new TestObject());
Entry<TestObject> entry1 = p.newEntry(new TestObject());
assertTrue(entry0.isValid());
assertTrue(entry1.isValid());
assertEquals(p,p.removeAllEntries());
assertFalse(entry0.isValid());
assertFalse(entry1.isValid());
}
public void testPartitionGetEntries(){
Partition<TestObject> p;
p = subject.newPartition();
Entry<TestObject> entry0 = p.newEntry(new TestObject());
Entry<TestObject> entry1 = p.newEntry(new TestObject());
assertTrue(p.getEntries().contains(entry0));
assertTrue(p.getEntries().contains(entry1));
}
public void testPartitionGetGlobalStartIndex(){
Partition<TestObject> p;
p = subject.newPartition();
assertEquals(0,p.getGlobalStartIndex());
// ???
}
public void testPartitionGetLengthInIndices(){
Partition<TestObject> p;
p = subject.newPartition();
p.newEntry(new TestObject());
p.newEntry(new TestObject());
p.newEntry(new TestObject());
assertEquals(3,p.getLengthInIndices());
}
public void testPartitionDefragment(){
Partition<TestObject> p0 = subject.newPartition();
Partition<TestObject> p1 = subject.newPartition();
Entry<TestObject> [] to = new Entry[2];
p0.newEntry(new TestObject());
p0.newEntry(new TestObject());
to[0] = p0.newEntry(new TestObject());
p1.newEntry(new TestObject());
p1.newEntry(new TestObject());
to[1] = p1.newEntry(new TestObject());
assertEquals(0,subject.getTotalUnusedIndices());
to[0].remove();
to[1].remove();
assertEquals(2,subject.getTotalUnusedIndices());
assertEquals(p0,p0.defragment(0));
assertEquals(1,subject.getTotalUnusedIndices());
assertEquals(p1,p1.defragment(0));
assertEquals(0,subject.getTotalUnusedIndices());
}
public void testPartitionSetUnusedLimitBehavior(){
Partition<TestObject> p0 = subject.newPartition();
Entry<TestObject> [] to = new Entry[2];
final int [] proposeDefragCounter = new int[1];
assertEquals(null,p0.setUnusedLimitBehavior(new UnusedIndexLimitBehavior(){
@Override
public void proposeDefragmentation(
PartitionedIndexPool<?> poolToCheck)
throws NullPointerException {
proposeDefragCounter[0]++;
}}));
assertEquals(0,proposeDefragCounter[0]);
to[0] = p0.newEntry(new TestObject());
to[1] = p0.newEntry(new TestObject());
assertEquals(0,proposeDefragCounter[0]);
p0.removeEntry(to[0]);
assertEquals(1,proposeDefragCounter[0]);
}
public void testPartitionGetUnusedLimitBehavior(){
Partition<TestObject> p0 = subject.newPartition();
Entry<TestObject> [] to = new Entry[2];
final int [] proposeDefragCounter = new int[1];
UnusedIndexLimitBehavior uilb;
assertEquals(null,p0.setUnusedLimitBehavior(uilb = new UnusedIndexLimitBehavior(){
@Override
public void proposeDefragmentation(
PartitionedIndexPool<?> poolToCheck)
throws NullPointerException {
proposeDefragCounter[0]++;
}}));
assertEquals(uilb,p0.getUnusedLimitBehavior());
}
public void testPartitionIsValid(){
Partition<TestObject> p0 = subject.newPartition();
assertTrue(p0.isValid());
p0.remove();
assertFalse(p0.isValid());
}
///////// ENTRY /////////////
public void testGetParent(){
Partition<TestObject> p0 = subject.newPartition();
Entry<TestObject> ent = p0.newEntry(new TestObject());
assertEquals(p0,ent.getParent());
}
public void testGet(){
Partition<TestObject> p0 = subject.newPartition();
TestObject to = new TestObject();
Entry<TestObject> ent = p0.newEntry(to);
assertEquals(to,ent.get());
}
public void testGetLocalIndex(){
Partition<TestObject> p0 = subject.newPartition();
TestObject to = new TestObject();
Entry<TestObject> ent = p0.newEntry(to);
assertEquals(0,ent.getLocalIndex());
}
public void testGetGlobalIndex(){
Partition<TestObject> p0 = subject.newPartition();
TestObject to = new TestObject();
Entry<TestObject> ent = p0.newEntry(to);
assertEquals(0,ent.getGlobalIndex());
}
public void testRemove(){
Partition<TestObject> p0 = subject.newPartition();
TestObject to = new TestObject();
Entry<TestObject> ent = p0.newEntry(to);
assertEquals(ent,ent.remove());
}
public void testIsValid(){
Partition<TestObject> p0 = subject.newPartition();
TestObject to = new TestObject();
Entry<TestObject> ent = p0.newEntry(to);
assertTrue(ent.isValid());
ent.remove();
assertFalse(ent.isValid());
}
}//end PartitionedIndexPoolTest
| Delete PartitionedIndexPoolTest.java | src/test/java/org/jtrfp/trcl/pool/PartitionedIndexPoolTest.java | Delete PartitionedIndexPoolTest.java | <ide><path>rc/test/java/org/jtrfp/trcl/pool/PartitionedIndexPoolTest.java
<del>/*******************************************************************************
<del> * This file is part of TERMINAL RECALL
<del> * Copyright (c) 2012-2015 Chuck Ritola
<del> * Part of the jTRFP.org project
<del> * All rights reserved. This program and the accompanying materials
<del> * are made available under the terms of the GNU Public License v3.0
<del> * which accompanies this distribution, and is available at
<del> * http://www.gnu.org/licenses/gpl.html
<del> *
<del> * Contributors:
<del> * chuck - initial API and implementation
<del> ******************************************************************************/
<del>
<del>package org.jtrfp.trcl.pool;
<del>
<del>import static org.junit.Assert.assertEquals;
<del>import static org.junit.Assert.assertFalse;
<del>import static org.junit.Assert.assertNotNull;
<del>import static org.junit.Assert.assertTrue;
<del>import static org.junit.Assert.fail;
<del>
<del>import java.util.ArrayList;
<del>import java.util.Collection;
<del>
<del>import org.jtrfp.trcl.pool.PartitionedIndexPool.Entry;
<del>import org.jtrfp.trcl.pool.PartitionedIndexPool.Partition;
<del>import org.jtrfp.trcl.pool.PartitionedIndexPool.UnusedIndexLimitBehavior;
<del>
<del>import com.ochafik.util.listenable.CollectionEvent;
<del>import com.ochafik.util.listenable.CollectionListener;
<del>import com.ochafik.util.listenable.ListenableCollection;
<del>import com.ochafik.util.listenable.ListenableList;
<del>
<del>public abstract class PartitionedIndexPoolTest {
<del>
<del> protected PartitionedIndexPool<TestObject> subject;
<del>
<del> protected class TestObject{
<del> }//end TestObject
<del>
<del> protected abstract PartitionedIndexPool<TestObject> newSubject();
<del>
<del> public void testNewPartition() {
<del> Partition<TestObject> tp = subject.newPartition();
<del> assertNotNull(tp);
<del> }
<del>
<del> public void testRemovePartition() {
<del> Partition<TestObject> tp = subject.newPartition();
<del> assertTrue(subject.getPartitions().contains(tp));
<del> assertTrue(tp.isValid());
<del> assertEquals(subject,subject.removePartition(tp));
<del> assertFalse(subject.getPartitions().contains(tp));
<del> assertFalse(tp.isValid());
<del> }
<del>
<del> public void testRemoveAllPartitions() {
<del> for(int i=0; i<10; i++)
<del> subject.newPartition();
<del> assertEquals(10, subject.getPartitions().size());
<del> Collection<Partition<TestObject>> parts = new ArrayList<Partition<TestObject>>();
<del> parts.addAll(subject.getPartitions());
<del> assertEquals(subject,subject.removeAllPartitions());
<del> assertEquals(0, subject.getPartitions().size());
<del> for(Partition<TestObject> p:parts)
<del> assertFalse(p.isValid());
<del> }
<del>
<del> public void testGetPartitions() {
<del> for(int i=0; i<10; i++)
<del> subject.newPartition();
<del> assertEquals(10,subject.getPartitions().size());
<del> Partition<TestObject> np = subject.newPartition();
<del> ListenableCollection<Partition<TestObject>> partitions;
<del> partitions = subject.getPartitions();
<del> assertNotNull(partitions);
<del> assertTrue((partitions).contains(np));
<del> final int [] added = new int[]{0};
<del> final int [] removed = new int[]{0};
<del> partitions.addCollectionListener(new CollectionListener<Partition<TestObject>>(){
<del> @Override
<del> public void collectionChanged(
<del> CollectionEvent<Partition<TestObject>> evt) {
<del> switch(evt.getType()){
<del> case ADDED:
<del> added[0]++;
<del> break;
<del> case REMOVED:
<del> removed[0]++;
<del> break;
<del> case UPDATED:
<del> fail("Items should not update.");
<del> break;
<del> default:
<del> fail("Unsupported enum: "+evt.getType());
<del> break;
<del> }
<del> }});
<del> assertEquals(0, added[0]);
<del> assertEquals(0, removed[0]);
<del> Partition<TestObject> np1 = subject.newPartition();
<del> assertEquals(1, added[0]);
<del> assertEquals(0, removed[0]);
<del> subject.removePartition(np1);
<del> assertEquals(1, added[0]);
<del> assertEquals(1, removed[0]);
<del> subject.removeAllPartitions();
<del> assertEquals(1, added[0]);
<del> assertEquals(12, removed[0]);
<del> assertTrue(partitions.isEmpty());
<del> }//end testGetPartitions()
<del>
<del> public void testGetFlatEntries() {
<del> final ListenableList<Entry<TestObject>> flatEntries = subject.getFlatEntries();
<del> assertNotNull(flatEntries);
<del> final Partition<TestObject> [] parts = new Partition[2];
<del> final TestObject [] to = new TestObject [] {new TestObject(),new TestObject()};
<del> final Entry<TestObject> [] entries = new Entry[4];
<del> final int [] added = new int[1];
<del> final int [] removed = new int[1];
<del> flatEntries.addCollectionListener(new CollectionListener<Entry<TestObject>>(){
<del> @Override
<del> public void collectionChanged(CollectionEvent<Entry<TestObject>> evt) {
<del> switch(evt.getType()){
<del> case ADDED:
<del> added[0]++;
<del> break;
<del> case REMOVED:
<del> removed[0]++;
<del> break;
<del> case UPDATED:
<del> fail("Unsupported enum: "+evt.getType());
<del> break;
<del> default:
<del> break;
<del> }//end EventType
<del> }});
<del> parts[0] = subject.newPartition();
<del> parts[1] = subject.newPartition();
<del> entries[0] = parts[0].newEntry(to[0]);
<del> entries[1] = parts[1].newEntry(to[1]);
<del> assertEquals(2,flatEntries.size());
<del> assertTrue(flatEntries.contains(entries[0]));
<del> assertTrue(flatEntries.contains(entries[1]));
<del> }
<del>
<del> public void testGetTotalUnusedIndices() {
<del> Partition<TestObject> p0 = subject.newPartition();
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> Partition<TestObject> p1 = subject.newPartition();
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del>
<del> Entry<TestObject> [] to = new Entry[2];
<del>
<del> p0.newEntry(new TestObject());
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> p0.newEntry(new TestObject());
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> to[0] = p0.newEntry(new TestObject());
<del>
<del> p1.newEntry(new TestObject());
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> p1.newEntry(new TestObject());
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> to[1] = p1.newEntry(new TestObject());
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del>
<del> to[0].remove();
<del> assertEquals(1, subject.getTotalUnusedIndices());
<del> to[1].remove();
<del> assertEquals(2, subject.getTotalUnusedIndices());
<del>
<del>	to[0] = p0.newEntry(new TestObject());
<del> assertEquals(1, subject.getTotalUnusedIndices());
<del>
<del> subject.defragment(1);
<del> assertEquals(1, subject.getTotalUnusedIndices());
<del> subject.defragment(0);
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> }
<del>
<del> public void testDefragment() {
<del> Partition<TestObject> p0 = subject.newPartition();
<del> Partition<TestObject> p1 = subject.newPartition();
<del>
<del> Entry<TestObject> [] to = new Entry[2];
<del>
<del> p0.newEntry(new TestObject());
<del> p0.newEntry(new TestObject());
<del> to[0] = p0.newEntry(new TestObject());
<del>
<del> p1.newEntry(new TestObject());
<del> p1.newEntry(new TestObject());
<del> to[1] = p1.newEntry(new TestObject());
<del>
<del> to[0].remove();
<del> to[1].remove();
<del>
<del> int unused = 0;
<del> for(Entry<TestObject> obj:subject.getFlatEntries())
<del> if(obj==null)unused++;
<del> assertEquals(2,unused);
<del>
<del> subject.defragment(1);
<del> unused = 0;
<del> for(Entry<TestObject> obj:subject.getFlatEntries())
<del> if(obj==null)unused++;
<del> assertEquals(1,unused);
<del>
<del> assertEquals(subject, subject.defragment(0));
<del> assertEquals(0, subject.getTotalUnusedIndices());
<del> unused = 0;
<del> for(Entry<TestObject> obj:subject.getFlatEntries())
<del> if(obj==null)unused++;
<del> assertEquals(0,unused);
<del> }
<del>
<del> public void testSetTotalUnusedLimitBehavior() {
<del> final int [] defragInvokes = new int[1];
<del> assertEquals(subject,subject.setTotalUnusedLimitBehavior(new UnusedIndexLimitBehavior(){
<del> @Override
<del> public void proposeDefragmentation(
<del> PartitionedIndexPool<?> poolToCheck)
<del> throws NullPointerException {
<del>		defragInvokes[0]++;
<del> }}));
<del> final Partition<TestObject> p = subject.newPartition();
<del> Entry<TestObject> ent;
<del> p.newEntry(new TestObject());
<del> ent = p.newEntry(new TestObject());
<del> assertEquals(0, defragInvokes[0]);
<del> ent.remove();
<del> assertEquals(1, defragInvokes[0]);
<del> }
<del>
<del> public void testGetTotalUnusedLimitBehavior() {
<del> UnusedIndexLimitBehavior uilb;
<del> subject.setTotalUnusedLimitBehavior(uilb = new UnusedIndexLimitBehavior(){
<del> @Override
<del> public void proposeDefragmentation(
<del> PartitionedIndexPool<?> poolToCheck)
<del> throws NullPointerException {
<del> }});
<del> assertEquals(uilb, subject.getTotalUnusedLimitBehavior());
<del> }
<del>
<del> /////////// PARTITION //////////////
<del>
<del> public void testPartitionGetParent(){
<del> assertEquals(subject,subject.newPartition().getParent());
<del> }
<del>
<del> public void testPartitionNewEntry(){
<del> Partition<TestObject> p;
<del> Entry<TestObject> entry = (p=subject.newPartition()).newEntry(new TestObject());
<del> assertNotNull(entry);
<del> assertTrue(p.getEntries().contains(entry));
<del> }
<del>
<del> public void testPartitionRemove(){
<del> Partition<TestObject> p;
<del> p = subject.newPartition();
<del> assertTrue(p.isValid());
<del> assertEquals(p,p.remove());
<del> assertFalse(p.isValid());
<del> assertFalse(subject.getPartitions().contains(p));
<del> }
<del>
<del> public void testPartitionRemoveEntry(){
<del> Partition<TestObject> p;
<del> p = subject.newPartition();
<del> Entry<TestObject> entry = p.newEntry(new TestObject());
<del> assertTrue(p.getEntries().contains(entry));
<del> assertEquals(entry,p.removeEntry(entry));
<del> assertFalse(p.getEntries().contains(entry));
<del> }
<del>
<del> public void testPartitionRemoveAllEntries(){
<del> Partition<TestObject> p;
<del> p = subject.newPartition();
<del> Entry<TestObject> entry0 = p.newEntry(new TestObject());
<del> Entry<TestObject> entry1 = p.newEntry(new TestObject());
<del> assertTrue(entry0.isValid());
<del> assertTrue(entry1.isValid());
<del> assertEquals(p,p.removeAllEntries());
<del> assertFalse(entry0.isValid());
<del> assertFalse(entry1.isValid());
<del> }
<del>
<del> public void testPartitionGetEntries(){
<del> Partition<TestObject> p;
<del> p = subject.newPartition();
<del> Entry<TestObject> entry0 = p.newEntry(new TestObject());
<del> Entry<TestObject> entry1 = p.newEntry(new TestObject());
<del> assertTrue(p.getEntries().contains(entry0));
<del> assertTrue(p.getEntries().contains(entry1));
<del> }
<del>
<del> public void testPartitionGetGlobalStartIndex(){
<del> Partition<TestObject> p;
<del> p = subject.newPartition();
<del> assertEquals(0,p.getGlobalStartIndex());
<del> // ???
<del> }
<del>
<del> public void testPartitionGetLengthInIndices(){
<del> Partition<TestObject> p;
<del> p = subject.newPartition();
<del> p.newEntry(new TestObject());
<del> p.newEntry(new TestObject());
<del> p.newEntry(new TestObject());
<del> assertEquals(3,p.getLengthInIndices());
<del> }
<del>
<del> public void testPartitionDefragment(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> Partition<TestObject> p1 = subject.newPartition();
<del>
<del> Entry<TestObject> [] to = new Entry[2];
<del>
<del> p0.newEntry(new TestObject());
<del> p0.newEntry(new TestObject());
<del> to[0] = p0.newEntry(new TestObject());
<del>
<del> p1.newEntry(new TestObject());
<del> p1.newEntry(new TestObject());
<del> to[1] = p1.newEntry(new TestObject());
<del>
<del> assertEquals(0,subject.getTotalUnusedIndices());
<del> to[0].remove();
<del> to[1].remove();
<del> assertEquals(2,subject.getTotalUnusedIndices());
<del>
<del> assertEquals(p0,p0.defragment(0));
<del> assertEquals(1,subject.getTotalUnusedIndices());
<del>
<del> assertEquals(p1,p1.defragment(0));
<del> assertEquals(0,subject.getTotalUnusedIndices());
<del> }
<del>
<del> public void testPartitionSetUnusedLimitBehavior(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> Entry<TestObject> [] to = new Entry[2];
<del>
<del> final int [] proposeDefragCounter = new int[1];
<del> assertEquals(null,p0.setUnusedLimitBehavior(new UnusedIndexLimitBehavior(){
<del> @Override
<del> public void proposeDefragmentation(
<del> PartitionedIndexPool<?> poolToCheck)
<del> throws NullPointerException {
<del> proposeDefragCounter[0]++;
<del> }}));
<del> assertEquals(0,proposeDefragCounter[0]);
<del> to[0] = p0.newEntry(new TestObject());
<del> to[1] = p0.newEntry(new TestObject());
<del> assertEquals(0,proposeDefragCounter[0]);
<del> p0.removeEntry(to[0]);
<del> assertEquals(1,proposeDefragCounter[0]);
<del> }
<del>
<del> public void testPartitionGetUnusedLimitBehavior(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> Entry<TestObject> [] to = new Entry[2];
<del>
<del> final int [] proposeDefragCounter = new int[1];
<del> UnusedIndexLimitBehavior uilb;
<del> assertEquals(null,p0.setUnusedLimitBehavior(uilb = new UnusedIndexLimitBehavior(){
<del> @Override
<del> public void proposeDefragmentation(
<del> PartitionedIndexPool<?> poolToCheck)
<del> throws NullPointerException {
<del> proposeDefragCounter[0]++;
<del> }}));
<del> assertEquals(uilb,p0.getUnusedLimitBehavior());
<del> }
<del>
<del> public void testPartitionIsValid(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> assertTrue(p0.isValid());
<del> p0.remove();
<del> assertFalse(p0.isValid());
<del> }
<del>
<del> ///////// ENTRY /////////////
<del>
<del> public void testGetParent(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> Entry<TestObject> ent = p0.newEntry(new TestObject());
<del> assertEquals(p0,ent.getParent());
<del> }
<del>
<del> public void testGet(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> TestObject to = new TestObject();
<del> Entry<TestObject> ent = p0.newEntry(to);
<del> assertEquals(to,ent.get());
<del> }
<del>
<del> public void testGetLocalIndex(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> TestObject to = new TestObject();
<del> Entry<TestObject> ent = p0.newEntry(to);
<del> assertEquals(0,ent.getLocalIndex());
<del> }
<del>
<del> public void testGetGlobalIndex(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> TestObject to = new TestObject();
<del> Entry<TestObject> ent = p0.newEntry(to);
<del> assertEquals(0,ent.getGlobalIndex());
<del> }
<del>
<del> public void testRemove(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> TestObject to = new TestObject();
<del> Entry<TestObject> ent = p0.newEntry(to);
<del> assertEquals(ent,ent.remove());
<del> }
<del>
<del> public void testIsValid(){
<del> Partition<TestObject> p0 = subject.newPartition();
<del> TestObject to = new TestObject();
<del> Entry<TestObject> ent = p0.newEntry(to);
<del> assertTrue(ent.isValid());
<del> ent.remove();
<del> assertFalse(ent.isValid());
<del> }
<del>}//end PartitionedIndexPoolTest |
||
Java | bsd-3-clause | 452a1381438b68cef3b25c548c86ac0be8d89863 | 0 | interdroid/ibis-ipl,interdroid/ibis-ipl,interdroid/ibis-ipl | package ibis.impl.net;
import ibis.ipl.ConnectionTimedOutException;
import ibis.ipl.IbisConfigurationException;
import ibis.ipl.ReadMessage;
import ibis.ipl.ReceivePort;
import ibis.ipl.ReceivePortConnectUpcall;
import ibis.ipl.ReceivePortIdentifier;
import ibis.ipl.ReceiveTimedOutException;
import ibis.ipl.SendPortIdentifier;
import ibis.ipl.Upcall;
import ibis.util.IPUtils;
import ibis.util.TypedProperties;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Vector;
/**
* Provides an implementation of the {@link ReceivePort} and {@link
* ReadMessage} interfaces of the IPL.
*/
public final class NetReceivePort extends NetPort implements ReceivePort,
ReadMessage, NetInputUpcall, NetEventQueueConsumer {
/* ___ INTERNAL CLASSES ____________________________________________ */
/* --- incoming connection manager thread -- */
/**
* The incoming connection management thread class.
*/
private final class AcceptThread extends Thread {
/**
* Flag indicating whether thread termination was requested.
*/
private boolean end = false;
public AcceptThread(String name) {
super("NetReceivePort.AcceptThread: " + name);
setDaemon(true);
}
/**
* The incoming connection management function.
*/
/*
* // Note: the thread is <strong>uninterruptible</strong>
* // during the network input locking.
*/
public void run() {
log.in("accept thread starting");
accept_loop: while (!end) {
NetServiceLink link = null;
try {
link = new NetServiceLink(eventQueue, serverSocket);
} catch (ConnectionTimedOutException e) {
continue accept_loop;
} catch (InterruptedIOException e) {
continue accept_loop;
} catch (IOException e) {
__.fwdAbort__(e);
}
Integer num = null;
NetSendPortIdentifier spi = null;
long startSeqno = 0;
num = new Integer(nextSendPortNum++);
try {
link.init(num);
} catch (IOException e) {
__.fwdAbort__(e);
}
String peerPrefix = null;
try {
ObjectInputStream is = new ObjectInputStream(
link.getInputSubStream("__port__"));
spi = (NetSendPortIdentifier) is.readObject();
int rank = is.readInt();
int spmid = is.readInt();
startSeqno = is.readLong();
trace.disp(receivePortTracePrefix
+ "New connection from: _s" + rank + "-" + spmid
+ "_");
peerPrefix = "_s" + rank + "-" + spmid + "_";
is.close();
ObjectOutputStream os = new ObjectOutputStream(
link.getOutputSubStream("__port__"));
os.writeInt(receivePortMessageRank);
os.writeInt(receivePortMessageId);
os.flush();
os.close();
} catch (IOException e) {
__.fwdAbort__(e);
} catch (ClassNotFoundException e) {
__.fwdAbort__(e);
}
connect_loop: while (!end) {
try {
connectionLock.ilock();
try {
//inputLock.lock();
NetConnection cnx = new NetConnection(
NetReceivePort.this, num, spi, identifier,
link, startSeqno);
synchronized (connectionTable) {
connectionTable.put(num, cnx);
connectedPeers.add(spi);
if (rpcu != null) {
rpcu.gotConnection(NetReceivePort.this,
spi);
}
if (connectionTable.size() == 1) {
singleConnection = cnx;
} else {
singleConnection = null;
}
if (connectionTable.size()
> maxLiveConnections) {
maxLiveConnections = connectionTable.size();
}
}
input.setupConnection(cnx);
input.startReceive();
//inputLock.unlock();
} finally {
connectionLock.unlock();
}
} catch (InterruptedIOException e) {
System.err.println(NetIbis.hostName()
+ ": While connecting meet " + e);
continue connect_loop;
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
}
break connect_loop;
}
}
log.out("accept thread leaving");
}
protected void end() throws IOException {
log.in();
synchronized (connectionLock) {
end = true;
}
if (serverSocket != null) {
serverSocket.close();
}
log.out();
}
}
/* ___ CONFIGURATION FLAGS _________________________________________ */
private boolean useUpcall = false;
/**
* Flag indicating whether unsuccessful active polling should be
* followed by a yield.
*
* Note: this flag only affects asynchronous multithreaded
* polling or synchronous {@link #receive} operations. In
* particular, the synchronous {@link #poll} operation is not
* affected.
*/
private boolean useYield = true;
/**
* Flag indicating whether receive should block in the poll()
* that necessarily precedes it, or whether we want to poll in a
* busy-wait style from this.
*/
public static final boolean useBlockingPoll = true;
/* ___ EVENT QUEUE _________________________________________________ */
private NetEventQueue eventQueue = null;
private NetEventQueueListener eventQueueListener = null;
/* ___ LESS-IMPORTANT OBJECTS ______________________________________ */
/**
* The upcall callback function.
*/
private Upcall upcall = null;
private ReceivePortConnectUpcall rpcu = null;
/**
* The port identifier.
*/
private NetReceivePortIdentifier identifier = null;
/**
* The TCP server socket.
*/
private ServerSocket serverSocket = null;
/**
* The next send port integer number.
*/
private int nextSendPortNum = 0;
/**
* Performance statistic
*/
// private int n_yield;
/**
* Maintain a linked list for cleanup
*/
NetReceivePort next = null;
/* ___ IMPORTANT OBJECTS ___________________________________________ */
/**
* Cache a single connection for fast lookup in the frequent case of one
* connection
*/
private NetConnection singleConnection = null;
/**
* The port's topmost input.
*/
private NetInput input = null;
/* ___ THREADS _____________________________________________________ */
/**
* The incoming connection management thread.
*/
private AcceptThread acceptThread = null;
/* ___ STATE _______________________________________________________ */
/**
* The current active peer port.
*/
private Integer activeSendPortNum = null;
/**
* Flag indicating whether incoming connections are currently enabled.
*/
private boolean connectionEnabled = false;
/**
     * Flag indicating whether a successful polling operation should
* generate an upcall or not.
*/
private boolean upcallsEnabled = false;
/**
* The empty message detection flag.
*
* The flag is set on each new {@link #_receive} call and should be
     * cleared as soon as at least one byte has been added to the current message.
*/
private boolean emptyMsg = true;
/**
* Indicate whether {@link #finish} should unlock the {@link #finishMutex}.
*/
private boolean finishNotify = false;
/**
* Indicate whether {@link #finish} should unlock the {@link #pollingLock}
*/
private boolean pollingNotify = false;
/**
* Reference the current upcall thread.
*
*/
private Runnable currentThread = null;
/**
* Internal receive port counter, for debugging.
*/
static private int receivePortCount = 0;
/**
* Seqno for numbered messages
*/
private long messageSeqno = -1;
/**
* Internal receive port id, for debugging.
*/
private int receivePortMessageId = -1;
/**
* Process rank, for debugging.
*/
private int receivePortMessageRank = 0;
/**
* Tracing log message prefix, for debugging.
*
*/
private String receivePortTracePrefix = null;
private Vector connectedPeers = null;
private Vector disconnectedPeers = null;
private int maxLiveConnections = 0;
private boolean closed = false;
/* ___ LOCKS _______________________________________________________ */
/**
     * The polling authorization lock.
*/
private NetMutex pollingLock = null;
/**
     * The message extraction authorization lock.
*/
private NetMutex polledLock = null;
/**
     * The incoming connection acceptance lock.
*/
private NetMutex connectionLock = null;
/**
* The network input synchronization lock.
*/
private ibis.util.Monitor inputLock = null;
private NetMutex finishMutex = null;
private Object dummyUpcallSync = new Object();
private int upcallsPending = 0;
/**
* Make a fast path for the (frequent) case that there is only one
* connection
*/
private NetConnection getActiveConnection() {
if (activeSendPortNum == null) {
return null;
}
if (singleConnection != null) {
return singleConnection;
}
return (NetConnection) connectionTable.get(activeSendPortNum);
}
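    /**
     * Bumps the message sequence number of the active connection and, once it
     * reaches the connection's close sequence number, notifies threads waiting
     * on the connection table.
     */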
private NetConnection checkClose() {
NetConnection cnx = getActiveConnection();
cnx.msgSeqno++;
if (cnx.msgSeqno >= cnx.closeSeqno) {
synchronized (connectionTable) {
connectionTable.notifyAll();
}
}
return cnx;
}
/* --- Upcall from main input object -- */
public void inputUpcall(NetInput input, Integer spn) throws IOException {
log.in();
if (this.input == null) {
__.warning__("message lost");
return;
}
if (spn == null) {
throw new Error("invalid state: NetReceivePort.inputUpcall");
}
activeSendPortNum = spn;
if (upcall != null && !upcallsEnabled) {
synchronized (dummyUpcallSync) {
upcallsPending++;
while (!upcallsEnabled) {
try {
dummyUpcallSync.wait();
} catch (InterruptedException e) {
// Go on waiting
}
}
upcallsPending--;
}
}
if (upcall != null && upcallsEnabled) {
final ReadMessage rm = _receive();
Thread me = Thread.currentThread();
currentThread = me;
upcall.upcall(rm);
if (me == currentThread) {
currentThread = null;
if (emptyMsg) {
input.handleEmptyMsg();
emptyMsg = false;
}
checkClose();
trace.disp(receivePortTracePrefix, "message receive <--");
}
} else {
synchronized (dummyUpcallSync) {
finishNotify = true;
polledLock.unlock();
finishMutex.lock();
}
}
log.out();
}
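    /**
     * Called when the peer closes the connection; wakes up any regular closer
     * waiting on the connection table.
     */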
public void closeFromRemote(NetConnection cnx) {
synchronized (connectionTable) {
if (cnx.regularClosers > 0) {
connectionTable.notifyAll();
}
}
}
/* --- NetEventQueueConsumer part --- */
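    /**
     * Consumes events posted on the port's event queue. Only the CLOSE_EVENT
     * code is handled: the matching connection is removed from the connection
     * table, the lost-connection upcall (if any) is invoked, and the
     * connection is closed. Any other event code is treated as an error.
     */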
public void event(NetEvent e) {
log.in();
NetPortEvent event = (NetPortEvent) e;
log.disp("IN: event.code() = ", event.code());
switch (event.code()) {
case NetPortEvent.CLOSE_EVENT: {
Integer num = (Integer) event.arg();
NetConnection cnx;
boolean timed_out = false;
while (true) {
synchronized (connectionTable) {
cnx = (NetConnection) connectionTable.get(num);
}
if (cnx == null) {
return;
}
if (timed_out) {
break;
}
if (cnx.closeSeqno != Long.MAX_VALUE) {
// Maybe we overtook the regular disconnect.
// Give it a little time to finish
try {
Thread.sleep(1000);
timed_out = true;
} catch (InterruptedException ei) {
// Give up
}
}
}
NetSendPortIdentifier nspi = null;
synchronized (connectionTable) {
nspi = cnx.getSendId();
cnx = (NetConnection) connectionTable.remove(cnx.getNum());
if (connectionTable.size() == 1) {
Enumeration elts = connectionTable.elements();
singleConnection = (NetConnection) elts.nextElement();
} else {
singleConnection = null;
}
disconnectedPeers.add(nspi);
}
if (rpcu != null) {
rpcu.lostConnection(this, nspi, new Exception());
}
try {
close(cnx, false);
} catch (IOException ei) {
throw new Error("close fails");
}
synchronized (connectionTable) {
cnx.closeSeqno = 0;
connectionTable.notifyAll();
}
}
break;
default:
throw new Error("invalid event code");
}
log.out();
}
/* --- NetReceivePort part --- */
/*
* Constructor.
*
* @param type the {@linkplain ibis.impl.net.NetPortType port type}.
* @param name the name of the port.
* @param upcall the reception upcall callback.
*/
public NetReceivePort(NetPortType type, String name, Upcall upcall,
ReceivePortConnectUpcall rpcu, boolean connectionAdministration)
throws IOException {
this.type = type;
this.name = name;
this.upcall = upcall;
this.rpcu = rpcu;
this.ibis = type.getIbis();
initDebugStreams();
initPassiveObjects();
initGlobalSettings(upcall != null);
initMainInput();
initServerSocket();
initIdentifier();
initActiveObjects();
ibis.register(this);
start();
}
private void initDebugStreams() {
receivePortMessageId = receivePortCount++;
receivePortMessageRank = ((NetIbis) type.getIbis()).closedPoolRank();
receivePortTracePrefix = "_r" + receivePortMessageRank + "-"
+ receivePortMessageId + "_ ";
String s = "//" + type.name() + " receivePort(" + name + ")/";
boolean log = type.getBooleanStringProperty(null, "Log", false);
boolean trace = type.getBooleanStringProperty(null, "Trace", false);
boolean disp = type.getBooleanStringProperty(null, "Disp",
TypedProperties.booleanProperty("net.disp"));
this.log = new NetLog(log, s, "LOG");
this.trace = new NetLog(trace, s, "TRACE");
this.disp = new NetLog(disp, s, "DISP");
this.trace.disp(receivePortTracePrefix, " receive port created");
}
private void initPassiveObjects() {
log.in();
connectionTable = new Hashtable();
polledLock = new NetMutex(true);
pollingLock = new NetMutex(false);
connectionLock = new NetMutex(true);
inputLock = new ibis.util.Monitor();
finishMutex = new NetMutex(true);
props = new NetDynamicProperties();
log.out();
}
private void initMainInput() throws IOException {
log.in();
String mainDriverName = type.getStringProperty("/", "Driver");
if (mainDriverName == null) {
throw new IbisConfigurationException("root driver not specified");
}
driver = ibis.getDriver(mainDriverName);
if (driver == null) {
throw new IbisConfigurationException("driver not found");
}
input = driver.newInput(type, null, useUpcall ? this : null);
log.out();
}
private void initGlobalSettings(boolean upcallSpecified) {
log.in();
useYield = type.getBooleanStringProperty(null, "UseYield",
TypedProperties.booleanProperty(NetIbis.port_yield, useYield));
useUpcall = type.getBooleanStringProperty(null, "UseUpcall", useUpcall);
// useBlockingPoll = type.getBooleanStringProperty(null, "UseBlockingPoll", useBlockingPoll );
if (!useUpcall && upcallSpecified) {
useUpcall = true;
}
if (!useYield) {
System.err.println("useYield " + useYield);
}
disp.disp("__ Configuration ____");
disp.disp("Upcall engine........", __.state__(useUpcall));
disp.disp("Yield................", __.state__(useYield));
disp.disp("Blocking poll........", __.state__(useBlockingPoll));
disp.disp("_____________________");
log.out();
}
private void initServerSocket() throws IOException {
log.in();
serverSocket = NetIbis.socketFactory.createServerSocket(0, 0,
IPUtils.getLocalHostAddress());
log.out();
}
private void initIdentifier() {
log.in();
Hashtable info = new Hashtable();
InetAddress addr = serverSocket.getInetAddress();
int port = serverSocket.getLocalPort();
info.put("accept_address", addr);
info.put("accept_port", new Integer(port));
NetIbisIdentifier ibisId = (NetIbisIdentifier) ibis.identifier();
identifier = new NetReceivePortIdentifier(name, type.name(), ibisId,
info);
log.out();
}
private void initActiveObjects() {
log.in();
connectedPeers = new Vector();
disconnectedPeers = new Vector();
acceptThread = new AcceptThread(name);
eventQueue = new NetEventQueue();
eventQueueListener = new NetEventQueueListener(this, "ReceivePort: "
+ name, eventQueue);
log.out();
}
private void start() {
log.in();
eventQueueListener.setDaemon(true);
eventQueueListener.start();
acceptThread.start();
log.out();
}
private Thread pollerThread;
/**
* The internal synchronous polling function.
*
* The calling thread is <strong>uninterruptible</strong> during
* the network input locking operation. The function may block
* if the {@linkplain #inputLock network input lock} is not available.
*/
private boolean _doPoll(boolean block) throws IOException {
log.in();
inputLock.lock();
pollerThread = Thread.currentThread();
try {
activeSendPortNum = input.poll(block);
} finally {
pollerThread = null;
inputLock.unlock();
}
if (activeSendPortNum == null) {
log.out("activeSendPortNum = null");
return false;
}
log.out("activeSendPortNum = ", activeSendPortNum);
return true;
}
public long sequenceNumber() {
return messageSeqno;
}
/**
* Internally initializes a new reception.
*/
private ReadMessage _receive() throws IOException {
log.in();
emptyMsg = true;
if (type.numbered()) {
emptyMsg = false;
messageSeqno = input.readSeqno();
// System.err.println(NetIbis.hostName() + " " + this + ": receive msg with seqno " + messageSeqno);
}
if (trace.on()) {
final String messageId = readString();
trace.disp(receivePortTracePrefix, "message receive --> ",
messageId);
}
log.out();
return this;
}
/**
* Blockingly attempts to receive a message.
*
* Note: if upcalls are currently enabled, this function is bypassed
* by the upcall callback unless no callback has been specified.
*
* @return A {@link ReadMessage} instance.
*/
public ReadMessage receive() throws IOException {
log.in();
if (useUpcall) {
polledLock.lock();
} else {
if (useYield) {
while (!_doPoll(useBlockingPoll)) {
NetIbis.yield();
// n_yield++;
}
} else {
                while (!_doPoll(useBlockingPoll)) { /* do nothing */
}
}
}
log.out();
return _receive();
}
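    /**
     * Same as {@link #receive()}, but polls for at most <code>millis</code>
     * milliseconds; a timeout of zero blocks without a time limit. Throws a
     * {@link ReceiveTimedOutException} if no message arrives in time.
     */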
public ReadMessage receive(long millis) throws IOException {
if (millis == 0) {
return receive();
} else {
long top = System.currentTimeMillis();
ReadMessage rm = null;
do {
rm = poll();
} while (rm == null && (System.currentTimeMillis() - top) < millis);
if (rm == null) {
throw new ReceiveTimedOutException("timeout expired in receive");
}
return rm;
}
}
/**
* Unblockingly attempts to receive a message.
*
* Note: if upcalls are currently enabled, this function is bypassed
* by the upcall callback unless no callback has been specified.
*
* @return A {@link ReadMessage} instance or <code>null</code> if polling
* was unsuccessful.
*/
public ReadMessage poll() throws IOException {
log.in();
if (useUpcall) {
if (!polledLock.trylock()) {
log.out("poll failure 1");
return null;
}
} else {
if (!_doPoll(false)) {
log.out("poll failure 2");
return null;
}
}
log.out("poll success");
return _receive();
}
public ReceivePortIdentifier identifier() {
log.in();
log.out();
return identifier;
}
/**
* Returns the identifier of the current active port peer or
* <code>null</code> if no peer port is active.
*
* @return The identifier of the port.
*/
protected NetSendPortIdentifier getActiveSendPortIdentifier() {
log.in();
NetConnection cnx = getActiveConnection();
if (cnx == null) {
throw new Error("no active sendPort");
}
if (cnx.getSendId() == null) {
throw new Error("invalid state: cnx.getSendId");
}
NetSendPortIdentifier id = cnx.getSendId();
log.out();
return id;
}
public SendPortIdentifier[] connectedTo() {
synchronized (connectionTable) {
int size = connectionTable.size();
SendPortIdentifier t[] = new SendPortIdentifier[size];
Iterator it = connectionTable.values().iterator();
int i = 0;
while (it.hasNext()) {
NetConnection cnx = (NetConnection) it.next();
t[i++] = cnx.getSendId();
}
return t;
}
}
public SendPortIdentifier[] lostConnections() {
synchronized (connectionTable) {
int size = disconnectedPeers.size();
SendPortIdentifier t[] = new SendPortIdentifier[size];
disconnectedPeers.copyInto(t);
disconnectedPeers.clear();
return t;
}
}
public SendPortIdentifier[] newConnections() {
synchronized (connectionTable) {
int size = connectedPeers.size();
SendPortIdentifier t[] = new SendPortIdentifier[size];
connectedPeers.copyInto(t);
connectedPeers.clear();
return t;
}
}
public ReceivePort localPort() {
return this;
}
public synchronized void enableConnections() {
log.in();
if (!connectionEnabled) {
connectionEnabled = true;
connectionLock.unlock();
}
log.out();
}
public synchronized void disableConnections() {
log.in();
if (connectionEnabled) {
connectionEnabled = false;
while (true) {
try {
connectionLock.lock();
break;
} catch (InterruptedIOException e) {
System.err.println("InterruptedIOException ignored in "
+ this + ".disableConnections");
}
}
}
log.out();
}
public synchronized void enableUpcalls() {
log.in();
synchronized (dummyUpcallSync) {
upcallsEnabled = true;
if (upcallsPending > 0) {
dummyUpcallSync.notify();
}
}
log.out();
}
public synchronized void disableUpcalls() {
log.in();
upcallsEnabled = false;
log.out();
}
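    /**
     * Closes a single connection. Unless <code>forced</code> is set, the call
     * first waits until the connection has seen all messages up to its close
     * sequence number, then shuts down the corresponding input and closes the
     * connection itself.
     */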
private void close(NetConnection cnx, boolean forced) throws IOException {
log.in();
if (cnx == null) {
log.out("cnx = null");
return;
}
synchronized (connectionTable) {
while (!forced && cnx.msgSeqno < cnx.closeSeqno) {
try {
cnx.regularClosers++;
connectionTable.wait();
cnx.regularClosers--;
} catch (InterruptedException e) {
break;
}
}
}
trace.disp(receivePortTracePrefix, "network connection shutdown-->");
input.close(cnx.getNum());
trace.disp(receivePortTracePrefix, "network connection shutdown<--");
try {
cnx.close();
} catch (Exception e) {
throw new Error(e.getMessage());
}
log.out();
}
/**
* Closes the port.
*/
public void close() throws IOException {
close(false, 0);
}
/**
* Closes the port.
*/
public void close(long timeout) throws IOException {
if (timeout == 0) {
close(false, 0);
} else if (timeout < 0) {
close(true, 0L);
} else {
close(false, timeout);
}
}
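    /**
     * Common implementation of the public <code>close</code> variants:
     * optionally waits for pending messages on every connection, terminates
     * the accept thread, closes all remaining connections, frees the main
     * input and unregisters the port. The <code>timeout</code> parameter is
     * not implemented yet.
     */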
private void close(boolean force, long timeout) throws IOException {
log.in();
trace.disp(receivePortTracePrefix, "receive port shutdown-->");
if (timeout != 0) {
__.unimplemented__("void close(long timeout)");
}
synchronized (this) {
if (closed) {
return;
}
closed = true;
}
if (!force && connectionTable != null) {
synchronized (connectionTable) {
boolean closing;
// Complicated looping construct. In the wait, we
// release the lock on connectionTable, so some other
// thread may/will modify the table. Then the Iterator
// is no longer valid, and we must start the whole
// procedure again.
outer: do {
closing = false;
Iterator i = connectionTable.values().iterator();
middle: while (i.hasNext()) {
NetConnection cnx = (NetConnection) i.next();
while (cnx.msgSeqno < cnx.closeSeqno) {
try {
cnx.regularClosers++;
connectionTable.wait();
cnx.regularClosers--;
closing = true;
// System.err.println("Do the cycle again, NetConnection iterator broken");
break middle;
} catch (InterruptedException e) {
break outer;
}
}
}
} while (closing);
}
}
synchronized (this) {
trace.disp(receivePortTracePrefix,
"receive port shutdown: input locked");
if (acceptThread != null) {
acceptThread.end();
while (true) {
try {
acceptThread.join();
break;
} catch (InterruptedException e) {
//
}
}
}
trace.disp(receivePortTracePrefix,
"receive port shutdown: accept thread terminated");
if (connectionTable != null) {
while (true) {
NetConnection cnx = null;
synchronized (connectionTable) {
Iterator i = connectionTable.values().iterator();
if (!i.hasNext()) {
break;
}
cnx = (NetConnection) i.next();
if (rpcu != null) {
rpcu.lostConnection(this, cnx.getSendId(),
new Exception());
}
i.remove();
}
if (cnx != null) {
close(cnx, force);
}
}
}
trace.disp(receivePortTracePrefix,
"receive port shutdown: all connections closed");
if (input != null) {
inputLock.lock();
try {
input.free();
input = null;
} finally {
inputLock.unlock();
}
}
trace.disp(receivePortTracePrefix,
"receive port shutdown: all inputs freed");
trace.disp(receivePortTracePrefix,
"receive port shutdown: input lock released");
}
if (type.manyToOne() && maxLiveConnections == 1) {
System.err.println(this
+ ": ManyToOne portType but only one connection");
}
ibis.unregister(this);
trace.disp(receivePortTracePrefix, "receive port shutdown<--");
log.out();
}
protected void finalize() throws Throwable {
log.in();
close();
if (eventQueueListener != null) {
eventQueueListener.end();
while (true) {
try {
eventQueueListener.join();
break;
} catch (InterruptedException e) {
//
}
}
eventQueueListener = null;
}
super.finalize();
log.out();
}
public long getCount() {
// TODO
return 0;
}
public void resetCount() {
// TODO
}
/* --- ReadMessage Part --- */
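    /**
     * Completes the current read message: accounts for an empty message,
     * updates the connection close bookkeeping, releases the main input and
     * unblocks a thread waiting in {@link #inputUpcall} or a pending poll,
     * if any.
     */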
public long finish() throws IOException {
log.in();
if (emptyMsg) {
input.handleEmptyMsg();
emptyMsg = false;
}
trace.disp(receivePortTracePrefix, "message receive <--");
NetConnection cnx = checkClose();
NetSendPortIdentifier id = cnx.getSendId();
activeSendPortNum = null;
currentThread = null;
input.finish();
if (finishNotify) {
finishNotify = false;
finishMutex.unlock();
}
if (pollingNotify) {
pollingNotify = false;
pollingLock.unlock();
}
log.out();
// TODO: return byte count of message
return 0;
}
public void finish(IOException e) {
// What to do here? Rutger?
try {
finish();
} catch (IOException e2) {
// Give up
}
}
public SendPortIdentifier origin() {
log.in();
SendPortIdentifier spi = getActiveSendPortIdentifier();
log.out();
return spi;
}
public boolean readBoolean() throws IOException {
log.in();
emptyMsg = false;
boolean v = input.readBoolean();
log.out();
return v;
}
public byte readByte() throws IOException {
log.in();
emptyMsg = false;
byte v = input.readByte();
log.out();
return v;
}
public char readChar() throws IOException {
log.in();
emptyMsg = false;
char v = input.readChar();
log.out();
return v;
}
public short readShort() throws IOException {
log.in();
emptyMsg = false;
short v = input.readShort();
log.out();
return v;
}
public int readInt() throws IOException {
log.in();
emptyMsg = false;
int v = input.readInt();
log.out();
return v;
}
public long readLong() throws IOException {
log.in();
emptyMsg = false;
long v = input.readLong();
log.out();
return v;
}
public float readFloat() throws IOException {
log.in();
emptyMsg = false;
float v = input.readFloat();
log.out();
return v;
}
public double readDouble() throws IOException {
log.in();
emptyMsg = false;
double v = input.readDouble();
log.out();
return v;
}
public String readString() throws IOException {
log.in();
emptyMsg = false;
String v = input.readString();
log.out();
return v;
}
public Object readObject() throws IOException, ClassNotFoundException {
log.in();
emptyMsg = false;
Object v = input.readObject();
log.out();
return v;
}
public void readArray(boolean[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(byte[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(char[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(short[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(int[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(long[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(float[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(double[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(Object[] b) throws IOException,
ClassNotFoundException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(boolean[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(byte[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(char[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(short[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(int[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(long[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(float[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(double[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(Object[] b, int o, int l) throws IOException,
ClassNotFoundException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
log.out();
}
}
| src/ibis/impl/net/NetReceivePort.java | package ibis.impl.net;
import ibis.ipl.ConnectionTimedOutException;
import ibis.ipl.IbisConfigurationException;
import ibis.ipl.ReadMessage;
import ibis.ipl.ReceivePort;
import ibis.ipl.ReceivePortConnectUpcall;
import ibis.ipl.ReceivePortIdentifier;
import ibis.ipl.ReceiveTimedOutException;
import ibis.ipl.SendPortIdentifier;
import ibis.ipl.Upcall;
import ibis.util.IPUtils;
import ibis.util.TypedProperties;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Vector;
/**
* Provides an implementation of the {@link ReceivePort} and {@link
* ReadMessage} interfaces of the IPL.
*/
public final class NetReceivePort extends NetPort implements ReceivePort,
ReadMessage, NetInputUpcall, NetEventQueueConsumer {
/* ___ INTERNAL CLASSES ____________________________________________ */
/* --- incoming connection manager thread -- */
/**
* The incoming connection management thread class.
*/
private final class AcceptThread extends Thread {
/**
* Flag indicating whether thread termination was requested.
*/
private boolean end = false;
public AcceptThread(String name) {
super("NetReceivePort.AcceptThread: " + name);
setDaemon(true);
}
/**
* The incoming connection management function.
*/
/*
* // Note: the thread is <strong>uninterruptible</strong>
* // during the network input locking.
*/
public void run() {
log.in("accept thread starting");
accept_loop: while (!end) {
NetServiceLink link = null;
try {
link = new NetServiceLink(eventQueue, serverSocket);
} catch (ConnectionTimedOutException e) {
continue accept_loop;
} catch (InterruptedIOException e) {
continue accept_loop;
} catch (IOException e) {
__.fwdAbort__(e);
}
Integer num = null;
NetSendPortIdentifier spi = null;
long startSeqno = 0;
num = new Integer(nextSendPortNum++);
try {
link.init(num);
} catch (IOException e) {
__.fwdAbort__(e);
}
String peerPrefix = null;
try {
ObjectInputStream is = new ObjectInputStream(
link.getInputSubStream("__port__"));
spi = (NetSendPortIdentifier) is.readObject();
int rank = is.readInt();
int spmid = is.readInt();
startSeqno = is.readLong();
trace.disp(receivePortTracePrefix
+ "New connection from: _s" + rank + "-" + spmid
+ "_");
peerPrefix = "_s" + rank + "-" + spmid + "_";
is.close();
ObjectOutputStream os = new ObjectOutputStream(
link.getOutputSubStream("__port__"));
os.writeInt(receivePortMessageRank);
os.writeInt(receivePortMessageId);
os.flush();
os.close();
} catch (IOException e) {
__.fwdAbort__(e);
} catch (ClassNotFoundException e) {
__.fwdAbort__(e);
}
connect_loop: while (!end) {
try {
connectionLock.ilock();
try {
//inputLock.lock();
NetConnection cnx = new NetConnection(
NetReceivePort.this, num, spi, identifier,
link, startSeqno);
synchronized (connectionTable) {
connectionTable.put(num, cnx);
connectedPeers.add(spi);
if (rpcu != null) {
rpcu.gotConnection(NetReceivePort.this,
spi);
}
if (connectionTable.size() == 1) {
singleConnection = cnx;
} else {
singleConnection = null;
}
if (connectionTable.size()
> maxLiveConnections) {
maxLiveConnections = connectionTable.size();
}
}
input.setupConnection(cnx);
input.startReceive();
//inputLock.unlock();
} finally {
connectionLock.unlock();
}
} catch (InterruptedIOException e) {
System.err.println(NetIbis.hostName()
+ ": While connecting meet " + e);
continue connect_loop;
} catch (Exception e) {
System.err.println(e.getMessage());
e.printStackTrace();
}
break connect_loop;
}
}
log.out("accept thread leaving");
}
protected void end() throws IOException {
log.in();
synchronized (connectionLock) {
end = true;
}
if (serverSocket != null) {
serverSocket.close();
}
log.out();
}
}
/* ___ CONFIGURATION FLAGS _________________________________________ */
private boolean useUpcall = false;
/**
* Flag indicating whether unsuccessful active polling should be
* followed by a yield.
*
* Note: this flag only affects asynchronous multithreaded
* polling or synchronous {@link #receive} operations. In
* particular, the synchronous {@link #poll} operation is not
* affected.
*/
private boolean useYield = true;
/**
* Flag indicating whether receive should block in the poll()
* that necessarily precedes it, or whether we want to poll in a
* busy-wait style from this.
*/
public static final boolean useBlockingPoll = true;
/* ___ EVENT QUEUE _________________________________________________ */
private NetEventQueue eventQueue = null;
private NetEventQueueListener eventQueueListener = null;
/* ___ LESS-IMPORTANT OBJECTS ______________________________________ */
/**
* The upcall callback function.
*/
private Upcall upcall = null;
private ReceivePortConnectUpcall rpcu = null;
/**
* The port identifier.
*/
private NetReceivePortIdentifier identifier = null;
/**
* The TCP server socket.
*/
private ServerSocket serverSocket = null;
/**
* The next send port integer number.
*/
private int nextSendPortNum = 0;
/**
* Performance statistic
*/
// private int n_yield;
/**
* Maintain a linked list for cleanup
*/
NetReceivePort next = null;
/* ___ IMPORTANT OBJECTS ___________________________________________ */
/**
* Cache a single connection for fast lookup in the frequent case of one
* connection
*/
private NetConnection singleConnection = null;
/**
* The port's topmost input.
*/
private NetInput input = null;
/* ___ THREADS _____________________________________________________ */
/**
* The incoming connection management thread.
*/
private AcceptThread acceptThread = null;
/* ___ STATE _______________________________________________________ */
/**
* The current active peer port.
*/
private Integer activeSendPortNum = null;
/**
* Flag indicating whether incoming connections are currently enabled.
*/
private boolean connectionEnabled = false;
/**
     * Flag indicating whether a successful polling operation should
* generate an upcall or not.
*/
private boolean upcallsEnabled = false;
/**
* The empty message detection flag.
*
* The flag is set on each new {@link #_receive} call and should be
     * cleared as soon as at least one byte has been added to the current message.
*/
private boolean emptyMsg = true;
/**
* Indicate whether {@link #finish} should unlock the {@link #finishMutex}.
*/
private boolean finishNotify = false;
/**
* Indicate whether {@link #finish} should unlock the {@link #pollingLock}
*/
private boolean pollingNotify = false;
/**
* Reference the current upcall thread.
*
*/
private Runnable currentThread = null;
/**
* Internal receive port counter, for debugging.
*/
static private int receivePortCount = 0;
/**
* Seqno for numbered messages
*/
private long messageSeqno = -1;
/**
* Internal receive port id, for debugging.
*/
private int receivePortMessageId = -1;
/**
* Process rank, for debugging.
*/
private int receivePortMessageRank = 0;
/**
* Tracing log message prefix, for debugging.
*
*/
private String receivePortTracePrefix = null;
private Vector connectedPeers = null;
private Vector disconnectedPeers = null;
private int maxLiveConnections = 0;
private boolean closed = false;
/* ___ LOCKS _______________________________________________________ */
/**
* The polling authorization lock.
*/
private NetMutex pollingLock = null;
/**
* The message extraction authorization lock.
*/
private NetMutex polledLock = null;
/**
* The incoming connection acceptance lock.
*/
private NetMutex connectionLock = null;
/**
* The network input synchronization lock.
*/
private ibis.util.Monitor inputLock = null;
private NetMutex finishMutex = null;
private Object dummyUpcallSync = new Object();
private int upcallsPending = 0;
/**
* Make a fast path for the (frequent) case that there is only one
* connection
*/
private NetConnection getActiveConnection() {
if (activeSendPortNum == null) {
return null;
}
if (singleConnection != null) {
return singleConnection;
}
return (NetConnection) connectionTable.get(activeSendPortNum);
}
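/**
 * Advances the message sequence number of the active connection and wakes
 * any thread waiting in {@link #close} once the connection's close
 * sequence number has been reached.
 */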
private NetConnection checkClose() {
NetConnection cnx = getActiveConnection();
cnx.msgSeqno++;
if (cnx.msgSeqno >= cnx.closeSeqno) {
synchronized (connectionTable) {
connectionTable.notifyAll();
}
}
return cnx;
}
/* --- Upcall from main input object -- */
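/**
 * Upcall invoked by the main input when a message arrives. Blocks until
 * upcalls are enabled, then either delivers the message to the
 * user-supplied {@link Upcall} or releases a thread blocked in
 * {@link #receive} and waits for it to finish the message.
 */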
public void inputUpcall(NetInput input, Integer spn) throws IOException {
log.in();
if (this.input == null) {
__.warning__("message lost");
return;
}
if (spn == null) {
throw new Error("invalid state: NetReceivePort.inputUpcall");
}
activeSendPortNum = spn;
if (upcall != null && !upcallsEnabled) {
synchronized (dummyUpcallSync) {
upcallsPending++;
while (!upcallsEnabled) {
try {
dummyUpcallSync.wait();
} catch (InterruptedException e) {
// Go on waiting
}
}
upcallsPending--;
}
}
if (upcall != null && upcallsEnabled) {
final ReadMessage rm = _receive();
Thread me = Thread.currentThread();
currentThread = me;
upcall.upcall(rm);
if (me == currentThread) {
currentThread = null;
if (emptyMsg) {
input.handleEmptyMsg();
emptyMsg = false;
}
checkClose();
trace.disp(receivePortTracePrefix, "message receive <--");
}
} else {
synchronized (dummyUpcallSync) {
finishNotify = true;
polledLock.unlock();
finishMutex.lock();
}
}
log.out();
}
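/**
 * Invoked when the connection is closed from the remote side; wakes up any
 * thread waiting on the connection table (for instance in {@link #close}).
 */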
public void closeFromRemote(NetConnection cnx) {
synchronized (connectionTable) {
if (cnx.regularClosers > 0) {
connectionTable.notifyAll();
}
}
}
/* --- NetEventQueueConsumer part --- */
public void event(NetEvent e) {
log.in();
NetPortEvent event = (NetPortEvent) e;
log.disp("IN: event.code() = ", event.code());
switch (event.code()) {
case NetPortEvent.CLOSE_EVENT: {
Integer num = (Integer) event.arg();
NetConnection cnx;
boolean timed_out = false;
while (true) {
synchronized (connectionTable) {
cnx = (NetConnection) connectionTable.get(num);
}
if (cnx == null) {
return;
}
if (timed_out) {
break;
}
if (cnx.closeSeqno != Long.MAX_VALUE) {
// Maybe we overtook the regular disconnect.
// Give it a little time to finish
try {
Thread.sleep(1000);
timed_out = true;
} catch (InterruptedException ei) {
// Give up
}
}
}
NetSendPortIdentifier nspi = null;
synchronized (connectionTable) {
nspi = cnx.getSendId();
cnx = (NetConnection) connectionTable.remove(cnx.getNum());
if (connectionTable.size() == 1) {
Enumeration elts = connectionTable.elements();
singleConnection = (NetConnection) elts.nextElement();
} else {
singleConnection = null;
}
disconnectedPeers.add(nspi);
}
if (rpcu != null) {
rpcu.lostConnection(this, nspi, new Exception());
}
try {
close(cnx, false);
} catch (IOException ei) {
throw new Error("close fails");
}
synchronized (connectionTable) {
cnx.closeSeqno = 0;
connectionTable.notifyAll();
}
}
break;
default:
throw new Error("invalid event code");
}
log.out();
}
/* --- NetReceivePort part --- */
/**
 * Constructor.
 *
 * @param type the {@linkplain ibis.impl.net.NetPortType port type}.
 * @param name the name of the port.
 * @param upcall the reception upcall callback.
 * @param rpcu the connection upcall handler, notified of lost connections.
 * @param connectionAdministration whether connection administration is enabled.
 */
public NetReceivePort(NetPortType type, String name, Upcall upcall,
ReceivePortConnectUpcall rpcu, boolean connectionAdministration)
throws IOException {
this.type = type;
this.name = name;
this.upcall = upcall;
this.rpcu = rpcu;
this.ibis = type.getIbis();
initDebugStreams();
initPassiveObjects();
initGlobalSettings(upcall != null);
initMainInput();
initServerSocket();
initIdentifier();
initActiveObjects();
ibis.register(this);
start();
}
private void initDebugStreams() {
receivePortMessageId = receivePortCount++;
receivePortMessageRank = ((NetIbis) type.getIbis()).closedPoolRank();
receivePortTracePrefix = "_r" + receivePortMessageRank + "-"
+ receivePortMessageId + "_ ";
String s = "//" + type.name() + " receivePort(" + name + ")/";
boolean log = type.getBooleanStringProperty(null, "Log", false);
boolean trace = type.getBooleanStringProperty(null, "Trace", false);
boolean disp = type.getBooleanStringProperty(null, "Disp",
TypedProperties.booleanProperty("net.disp"));
this.log = new NetLog(log, s, "LOG");
this.trace = new NetLog(trace, s, "TRACE");
this.disp = new NetLog(disp, s, "DISP");
this.trace.disp(receivePortTracePrefix, " receive port created");
}
private void initPassiveObjects() {
log.in();
connectionTable = new Hashtable();
polledLock = new NetMutex(true);
pollingLock = new NetMutex(false);
connectionLock = new NetMutex(true);
inputLock = new ibis.util.Monitor();
finishMutex = new NetMutex(true);
props = new NetDynamicProperties();
log.out();
}
private void initMainInput() throws IOException {
log.in();
String mainDriverName = type.getStringProperty("/", "Driver");
if (mainDriverName == null) {
throw new IbisConfigurationException("root driver not specified");
}
driver = ibis.getDriver(mainDriverName);
if (driver == null) {
throw new IbisConfigurationException("driver not found");
}
input = driver.newInput(type, null, useUpcall ? this : null);
log.out();
}
private void initGlobalSettings(boolean upcallSpecified) {
log.in();
useYield = type.getBooleanStringProperty(null, "UseYield",
TypedProperties.booleanProperty(NetIbis.port_yield, useYield));
useUpcall = type.getBooleanStringProperty(null, "UseUpcall", useUpcall);
// useBlockingPoll = type.getBooleanStringProperty(null, "UseBlockingPoll", useBlockingPoll );
if (!useUpcall && upcallSpecified) {
useUpcall = true;
}
if (!useYield) {
System.err.println("useYield " + useYield);
}
disp.disp("__ Configuration ____");
disp.disp("Upcall engine........", __.state__(useUpcall));
disp.disp("Yield................", __.state__(useYield));
disp.disp("Blocking poll........", __.state__(useBlockingPoll));
disp.disp("_____________________");
log.out();
}
private void initServerSocket() throws IOException {
log.in();
serverSocket = NetIbis.socketFactory.createServerSocket(0, 0,
IPUtils.getLocalHostAddress());
log.out();
}
private void initIdentifier() {
log.in();
Hashtable info = new Hashtable();
InetAddress addr = serverSocket.getInetAddress();
int port = serverSocket.getLocalPort();
info.put("accept_address", addr);
info.put("accept_port", new Integer(port));
NetIbisIdentifier ibisId = (NetIbisIdentifier) ibis.identifier();
identifier = new NetReceivePortIdentifier(name, type.name(), ibisId,
info);
log.out();
}
private void initActiveObjects() {
log.in();
connectedPeers = new Vector();
disconnectedPeers = new Vector();
acceptThread = new AcceptThread(name);
eventQueue = new NetEventQueue();
eventQueueListener = new NetEventQueueListener(this, "ReceivePort: "
+ name, eventQueue);
log.out();
}
private void start() {
log.in();
eventQueueListener.setDaemon(true);
eventQueueListener.start();
acceptThread.start();
log.out();
}
private Thread pollerThread;
/**
* The internal synchronous polling function.
*
* The calling thread is <strong>uninterruptible</strong> during
* the network input locking operation. The function may block
* if the {@linkplain #inputLock network input lock} is not available.
*/
private boolean _doPoll(boolean block) throws IOException {
log.in();
inputLock.lock();
pollerThread = Thread.currentThread();
try {
activeSendPortNum = input.poll(block);
} finally {
pollerThread = null;
inputLock.unlock();
}
if (activeSendPortNum == null) {
log.out("activeSendPortNum = null");
return false;
}
log.out("activeSendPortNum = ", activeSendPortNum);
return true;
}
public long sequenceNumber() {
return messageSeqno;
}
/**
* Internally initializes a new reception.
*/
private ReadMessage _receive() throws IOException {
log.in();
emptyMsg = true;
if (type.numbered()) {
emptyMsg = false;
messageSeqno = input.readSeqno();
// System.err.println(NetIbis.hostName() + " " + this + ": receive msg with seqno " + messageSeqno);
}
if (trace.on()) {
final String messageId = readString();
trace.disp(receivePortTracePrefix, "message receive --> ",
messageId);
}
log.out();
return this;
}
/**
* Blockingly attempts to receive a message.
*
* Note: if upcalls are currently enabled, this function is bypassed
* by the upcall callback unless no callback has been specified.
*
* @return A {@link ReadMessage} instance.
*/
public ReadMessage receive() throws IOException {
log.in();
if (useUpcall) {
polledLock.lock();
} else {
if (useYield) {
while (!_doPoll(useBlockingPoll)) {
NetIbis.yield();
// n_yield++;
}
} else {
while (!_doPoll(useBlockingPoll)) { /* do nothing: busy-wait */
}
}
}
log.out();
return _receive();
}
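/**
 * Timed receive: polls until a message arrives or <code>millis</code>
 * milliseconds have elapsed, in which case a ReceiveTimedOutException is
 * thrown. A timeout of 0 blocks indefinitely.
 */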
public ReadMessage receive(long millis) throws IOException {
if (millis == 0) {
return receive();
} else {
long top = System.currentTimeMillis();
ReadMessage rm = null;
do {
rm = poll();
} while (rm == null && (System.currentTimeMillis() - top) < millis);
if (rm == null) {
throw new ReceiveTimedOutException("timeout expired in receive");
}
return rm;
}
}
/**
* Unblockingly attempts to receive a message.
*
* Note: if upcalls are currently enabled, this function is bypassed
* by the upcall callback unless no callback has been specified.
*
* @return A {@link ReadMessage} instance or <code>null</code> if polling
* was unsuccessful.
*/
public ReadMessage poll() throws IOException {
log.in();
if (useUpcall) {
if (!polledLock.trylock()) {
log.out("poll failure 1");
return null;
}
} else {
if (!_doPoll(false)) {
log.out("poll failure 2");
return null;
}
}
log.out("poll success");
return _receive();
}
public ReceivePortIdentifier identifier() {
log.in();
log.out();
return identifier;
}
/**
* Returns the identifier of the current active port peer or
* <code>null</code> if no peer port is active.
*
* @return The identifier of the port.
*/
protected NetSendPortIdentifier getActiveSendPortIdentifier() {
log.in();
NetConnection cnx = getActiveConnection();
if (cnx == null) {
throw new Error("no active sendPort");
}
if (cnx.getSendId() == null) {
throw new Error("invalid state: cnx.getSendId");
}
NetSendPortIdentifier id = cnx.getSendId();
log.out();
return id;
}
public SendPortIdentifier[] connectedTo() {
synchronized (connectionTable) {
SendPortIdentifier t[]
= new SendPortIdentifier[connectionTable.size()];
Iterator it = connectionTable.values().iterator();
int i = 0;
while (it.hasNext()) {
NetConnection cnx = (NetConnection) it.next();
t[i++] = cnx.getSendId();
}
return t;
}
}
public SendPortIdentifier[] lostConnections() {
synchronized (connectionTable) {
SendPortIdentifier t[]
= new SendPortIdentifier[disconnectedPeers.size()];
disconnectedPeers.copyInto(t);
disconnectedPeers.clear();
return t;
}
}
public SendPortIdentifier[] newConnections() {
synchronized (connectionTable) {
SendPortIdentifier t[]
= new SendPortIdentifier[connectedPeers.size()];
connectedPeers.copyInto(t);
connectedPeers.clear();
return t;
}
}
public ReceivePort localPort() {
return this;
}
public synchronized void enableConnections() {
log.in();
if (!connectionEnabled) {
connectionEnabled = true;
connectionLock.unlock();
}
log.out();
}
public synchronized void disableConnections() {
log.in();
if (connectionEnabled) {
connectionEnabled = false;
while (true) {
try {
connectionLock.lock();
break;
} catch (InterruptedIOException e) {
System.err.println("InterruptedIOException ignored in "
+ this + ".disableConnections");
}
}
}
log.out();
}
public synchronized void enableUpcalls() {
log.in();
synchronized (dummyUpcallSync) {
upcallsEnabled = true;
if (upcallsPending > 0) {
dummyUpcallSync.notify();
}
}
log.out();
}
public synchronized void disableUpcalls() {
log.in();
upcallsEnabled = false;
log.out();
}
private void close(NetConnection cnx, boolean forced) throws IOException {
log.in();
if (cnx == null) {
log.out("cnx = null");
return;
}
synchronized (connectionTable) {
while (!forced && cnx.msgSeqno < cnx.closeSeqno) {
try {
cnx.regularClosers++;
connectionTable.wait();
cnx.regularClosers--;
} catch (InterruptedException e) {
break;
}
}
}
trace.disp(receivePortTracePrefix, "network connection shutdown-->");
input.close(cnx.getNum());
trace.disp(receivePortTracePrefix, "network connection shutdown<--");
try {
cnx.close();
} catch (Exception e) {
throw new Error(e.getMessage());
}
log.out();
}
/**
* Closes the port.
*/
public void close() throws IOException {
close(false, 0);
}
/**
* Closes the port.
*/
public void close(long timeout) throws IOException {
if (timeout == 0) {
close(false, 0);
} else if (timeout < 0) {
close(true, 0L);
} else {
close(false, timeout);
}
}
private void close(boolean force, long timeout) throws IOException {
log.in();
trace.disp(receivePortTracePrefix, "receive port shutdown-->");
if (timeout != 0) {
__.unimplemented__("void close(long timeout)");
}
synchronized (this) {
if (closed) {
return;
}
closed = true;
}
if (!force && connectionTable != null) {
synchronized (connectionTable) {
boolean closing;
// Complicated looping construct. In the wait, we
// release the lock on connectionTable, so some other
// thread may/will modify the table. Then the Iterator
// is no longer valid, and we must start the whole
// procedure again.
outer: do {
closing = false;
Iterator i = connectionTable.values().iterator();
middle: while (i.hasNext()) {
NetConnection cnx = (NetConnection) i.next();
while (cnx.msgSeqno < cnx.closeSeqno) {
try {
cnx.regularClosers++;
connectionTable.wait();
cnx.regularClosers--;
closing = true;
// System.err.println("Do the cycle again, NetConnection iterator broken");
break middle;
} catch (InterruptedException e) {
break outer;
}
}
}
} while (closing);
}
}
synchronized (this) {
trace.disp(receivePortTracePrefix,
"receive port shutdown: input locked");
if (acceptThread != null) {
acceptThread.end();
while (true) {
try {
acceptThread.join();
break;
} catch (InterruptedException e) {
//
}
}
}
trace.disp(receivePortTracePrefix,
"receive port shutdown: accept thread terminated");
if (connectionTable != null) {
while (true) {
NetConnection cnx = null;
synchronized (connectionTable) {
Iterator i = connectionTable.values().iterator();
if (!i.hasNext()) {
break;
}
cnx = (NetConnection) i.next();
if (rpcu != null) {
rpcu.lostConnection(this, cnx.getSendId(),
new Exception());
}
i.remove();
}
if (cnx != null) {
close(cnx, force);
}
}
}
trace.disp(receivePortTracePrefix,
"receive port shutdown: all connections closed");
if (input != null) {
inputLock.lock();
try {
input.free();
input = null;
} finally {
inputLock.unlock();
}
}
trace.disp(receivePortTracePrefix,
"receive port shutdown: all inputs freed");
trace.disp(receivePortTracePrefix,
"receive port shutdown: input lock released");
}
if (type.manyToOne() && maxLiveConnections == 1) {
System.err.println(this
+ ": ManyToOne portType but only one connection");
}
ibis.unregister(this);
trace.disp(receivePortTracePrefix, "receive port shutdown<--");
log.out();
}
protected void finalize() throws Throwable {
log.in();
close();
if (eventQueueListener != null) {
eventQueueListener.end();
while (true) {
try {
eventQueueListener.join();
break;
} catch (InterruptedException e) {
//
}
}
eventQueueListener = null;
}
super.finalize();
log.out();
}
public long getCount() {
// TODO
return 0;
}
public void resetCount() {
// TODO
}
/* --- ReadMessage Part --- */
public long finish() throws IOException {
log.in();
if (emptyMsg) {
input.handleEmptyMsg();
emptyMsg = false;
}
trace.disp(receivePortTracePrefix, "message receive <--");
NetConnection cnx = checkClose();
NetSendPortIdentifier id = cnx.getSendId();
activeSendPortNum = null;
currentThread = null;
input.finish();
if (finishNotify) {
finishNotify = false;
finishMutex.unlock();
}
if (pollingNotify) {
pollingNotify = false;
pollingLock.unlock();
}
log.out();
// TODO: return byte count of message
return 0;
}
public void finish(IOException e) {
// What to do here? Rutger?
try {
finish();
} catch (IOException e2) {
// Give up
}
}
public SendPortIdentifier origin() {
log.in();
SendPortIdentifier spi = getActiveSendPortIdentifier();
log.out();
return spi;
}
public boolean readBoolean() throws IOException {
log.in();
emptyMsg = false;
boolean v = input.readBoolean();
log.out();
return v;
}
public byte readByte() throws IOException {
log.in();
emptyMsg = false;
byte v = input.readByte();
log.out();
return v;
}
public char readChar() throws IOException {
log.in();
emptyMsg = false;
char v = input.readChar();
log.out();
return v;
}
public short readShort() throws IOException {
log.in();
emptyMsg = false;
short v = input.readShort();
log.out();
return v;
}
public int readInt() throws IOException {
log.in();
emptyMsg = false;
int v = input.readInt();
log.out();
return v;
}
public long readLong() throws IOException {
log.in();
emptyMsg = false;
long v = input.readLong();
log.out();
return v;
}
public float readFloat() throws IOException {
log.in();
emptyMsg = false;
float v = input.readFloat();
log.out();
return v;
}
public double readDouble() throws IOException {
log.in();
emptyMsg = false;
double v = input.readDouble();
log.out();
return v;
}
public String readString() throws IOException {
log.in();
emptyMsg = false;
String v = input.readString();
log.out();
return v;
}
public Object readObject() throws IOException, ClassNotFoundException {
log.in();
emptyMsg = false;
Object v = input.readObject();
log.out();
return v;
}
public void readArray(boolean[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(byte[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(char[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(short[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(int[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(long[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(float[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(double[] b) throws IOException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(Object[] b) throws IOException,
ClassNotFoundException {
log.in();
readArray(b, 0, b.length);
log.out();
}
public void readArray(boolean[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(byte[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(char[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(short[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(int[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(long[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(float[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(double[] b, int o, int l) throws IOException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
}
public void readArray(Object[] b, int o, int l) throws IOException,
ClassNotFoundException {
log.in();
if (l == 0) {
log.out("l = 0");
return;
}
emptyMsg = false;
input.readArray(b, o, l);
log.out();
}
}
| Do still just one layout thingy
git-svn-id: f22e84ca493ccad7df8d2727bca69d1c9fc2e5c5@2818 aaf88347-d911-0410-b711-e54d386773bb
| src/ibis/impl/net/NetReceivePort.java | Do still just one layout thingy | <ide><path>rc/ibis/impl/net/NetReceivePort.java
<ide>
<ide> public SendPortIdentifier[] connectedTo() {
<ide> synchronized (connectionTable) {
<del> SendPortIdentifier t[]
<del> = new SendPortIdentifier[connectionTable.size()];
<add> int size = connectionTable.size();
<add> SendPortIdentifier t[] = new SendPortIdentifier[size];
<ide>
<ide> Iterator it = connectionTable.values().iterator();
<ide> int i = 0;
<ide>
<ide> public SendPortIdentifier[] lostConnections() {
<ide> synchronized (connectionTable) {
<del> SendPortIdentifier t[]
<del> = new SendPortIdentifier[disconnectedPeers.size()];
<add> int size = disconnectedPeers.size();
<add> SendPortIdentifier t[] = new SendPortIdentifier[size];
<ide> disconnectedPeers.copyInto(t);
<ide> disconnectedPeers.clear();
<ide>
<ide>
<ide> public SendPortIdentifier[] newConnections() {
<ide> synchronized (connectionTable) {
<del> SendPortIdentifier t[]
<del> = new SendPortIdentifier[connectedPeers.size()];
<add> int size = connectedPeers.size();
<add> SendPortIdentifier t[] = new SendPortIdentifier[size];
<ide> connectedPeers.copyInto(t);
<ide> connectedPeers.clear();
<ide> |
|
Java | unlicense | 27f5b6df89cf0a677d52c9b812027f5a8147cb55 | 0 | Ohohcakester/Any-Angle-Pathfinding,Ohohcakester/Any-Angle-Pathfinding | package main;
import java.awt.Color;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import algorithms.AStar;
import algorithms.Anya;
import algorithms.BasicThetaStar;
import algorithms.JumpPointSearch;
import algorithms.StrictVisibilityGraphAlgorithm;
import algorithms.StrictVisibilityGraphAlgorithmV2;
import algorithms.datatypes.Point;
import algorithms.datatypes.SnapshotItem;
import algorithms.sparsevgs.LineOfSightScanner;
import algorithms.sparsevgs.SparseVisibilityGraph;
import algorithms.strictthetastar.RecursiveStrictThetaStar;
import algorithms.strictthetastar.StrictThetaStar;
import algorithms.visibilitygraph.VisibilityGraph;
import draw.DrawCanvas;
import draw.GridLineSet;
import draw.GridObjects;
import draw.GridPointSet;
import grid.GridAndGoals;
import grid.GridGraph;
import grid.ReachableNodes;
import grid.StartGoalPoints;
import main.graphgeneration.DefaultGenerator;
import main.utility.Utility;
import uiandio.FileIO;
import uiandio.GraphImporter;
public class Experiment {
public static void run() {
// testVisibilityGraphSize();
// testAbilityToFindGoal();
// findStrictThetaStarIssues();
// findUpperBound();
// testAlgorithmOptimality();
//testAgainstReferenceAlgorithm();
//countTautPaths();
// other();
testLOSScan();
}
/**
* Custom code for experiments.
*/
public static void other() {
// This is how to generate test data for the grid. (Use the VisibilityGraph algorithm to generate optimal path lengths)
// ArrayList<Point> points = ReachableNodes.computeReachable(gridGraph, 5, 5);
// System.out.println(points.size());
//
// generateRandomTestDataAndPrint(gridGraph);
// This is how to conduct a running time / path length test for the algorithm:
// TestResult test1 = testAlgorithm(gridGraph, sx, sy, ex, ey, 1, 1);
// System.out.println(test1);
// TestResult test2 = testAlgorithm(gridGraph, sx, sy, ex, ey, 30, 25);
// System.out.println(test2);
}
/**
* Generates and prints out random test data for the gridGraph in question. <br>
* Note: the algorithm used is the one specified in the algoFunction.
* Use setDefaultAlgoFunction to choose the algorithm.
* @param gridGraph the grid to test.
*/
private static void generateRandomTestDataAndPrint(GridGraph gridGraph) {
AlgoFunction algo = AnyAnglePathfinding.setDefaultAlgoFunction();
ArrayList<Point> points = ReachableNodes.computeReachable(gridGraph, 5, 5);
LinkedList<Integer> startX = new LinkedList<>();
LinkedList<Integer> startY = new LinkedList<>();
LinkedList<Integer> endX = new LinkedList<>();
LinkedList<Integer> endY = new LinkedList<>();
LinkedList<Double> length = new LinkedList<>();
int size = points.size();
System.out.println("Points: " + size);
for (int i=0; i<100; i++) {
Random random = new Random();
int first = random.nextInt(size);
int last = random.nextInt(size-1);
if (last == first) last = size-1; // prevent first and last from being the same
Point s = points.get(first);
Point f = points.get(last);
int[][] path = Utility.generatePath(algo, gridGraph, s.x, s.y, f.x, f.y);
if (path.length >= 2) {
double len = Utility.computePathLength(gridGraph, path);
startX.offer(s.x);
startY.offer(s.y);
endX.offer(f.x);
endY.offer(f.y);
length.offer(len);
}
if (i%10 == 0) System.out.println("Computed: " + i);
}
System.out.println(startX);
System.out.println(startY);
System.out.println(endX);
System.out.println(endY);
System.out.println(length);
}
/**
* Returns true iff there is a path from the start to the end. Uses the current algorithm to check.<br>
* Use setDefaultAlgoFunction to choose the algorithm.
*/
private static boolean hasSolution(AlgoFunction algo, GridGraph gridGraph, StartGoalPoints p) {
int[][] path = Utility.generatePath(algo, gridGraph, p.sx, p.sy, p.ex, p.ey);
return path.length > 1;
}
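/**
 * Loads the maze configured in AnyAnglePathfinding, runs a
 * LineOfSightScanner from a fixed start point and visualises the computed
 * visible successors as green lines and red points on the grid.
 */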
private static void testLOSScan() {
GridAndGoals gridAndGoals = AnyAnglePathfinding.loadMaze();
GridGraph gridGraph = gridAndGoals.gridGraph;
ArrayList<GridObjects> gridObjectsList = new ArrayList<>();
GridLineSet gridLineSet = new GridLineSet();
GridPointSet gridPointSet = new GridPointSet();
int dx, dy;
Random rand = new Random();
{
int sx = rand.nextInt(gridGraph.sizeX+1);
int sy = rand.nextInt(gridGraph.sizeY+1);
sx = gridAndGoals.startGoalPoints.sx;
sy = gridAndGoals.startGoalPoints.sy;
sx = 24; sy = 24;
dx = -1; dy = 2;
LineOfSightScanner losScanner = new LineOfSightScanner(gridGraph);
try {
// Expected running time: 500x500, blocked ratio 25 ==> 0.07ms to 0.1ms per iteration.
int iterations = 30000;
long start = System.nanoTime();
for (int i=0;i<iterations;++i) {
//losScanner.computeAllVisibleTautSuccessors(rand.nextInt(gridGraph.sizeX+1), rand.nextInt(gridGraph.sizeY+1));
//losScanner.clearSnapshots();
}
long end = System.nanoTime();
double totalTime = (end-start)/1000000.; // convert to milliseconds
System.out.println("Total Time: " + totalTime);
System.out.println("Per iteration time: " + (totalTime/iterations));
//losScanner.computeAllVisibleTwoWayTautSuccessors(sx, sy);
losScanner.computeAllVisibleSuccessors(sx, sy);
//losScanner.computeAllVisibleTautSuccessors(sx, sy);
//losScanner.computeAllVisibleIncrementalTautSuccessors(sx, sy, dx, dy);
} catch (Exception e) {
e.printStackTrace();
}
for (int i=0;i<losScanner.nSuccessors;++i) {
int x = losScanner.successorsX[i];
int y = losScanner.successorsY[i];
gridLineSet.addLine(sx, sy, x,y, Color.GREEN);
gridPointSet.addPoint(x, y, Color.RED);
}
//gridLineSet = generateRandomTestLines(gridGraph, 10);
gridPointSet.addPoint(sx, sy, Color.BLUE);
gridObjectsList.add(new GridObjects(gridLineSet, gridPointSet));
for (List<SnapshotItem> l : LineOfSightScanner.snapshotList) {
gridObjectsList.add(GridObjects.create(l));
}
}
DrawCanvas drawCanvas = new DrawCanvas(gridGraph, gridLineSet);
Visualisation.setupMainFrame(drawCanvas, gridObjectsList);
}
/**
* Generates random lines on the map. Used to test whether the line-of-sight
* algorithm is correct. Returns a gridLineSet containing all the test lines.
*/
private static GridLineSet generateRandomTestLines(GridGraph gridGraph,
int amount) {
GridLineSet gridLineSet = new GridLineSet();
Random rand = new Random();
for (int i=0; i<amount; i++) {
int x1 = rand.nextInt(gridGraph.sizeX);
int y1 = rand.nextInt(gridGraph.sizeY);
int x2 = rand.nextInt(gridGraph.sizeX);
int y2 = rand.nextInt(gridGraph.sizeY);
Experiment.testAndAddLine(x1,y1,x2,y2,gridGraph,gridLineSet);
}
return gridLineSet;
}
/**
* Tests a set of coordinates for line-of-sight. Adds a green line to the
* gridLineSet if there is line-of-sight between (x1,y1) and (x2,y2).
* Adds a red line otherwise.
*/
private static void testAndAddLine(int x1, int y1, int x2, int y2,
GridGraph gridGraph, GridLineSet gridLineSet) {
if (gridGraph.lineOfSight(x1, y1, x2, y2)) {
gridLineSet.addLine(x1, y1, x2, y2, Color.GREEN);
} else {
gridLineSet.addLine(x1, y1, x2, y2, Color.RED);
}
}
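/**
 * Compares the currently selected algorithm against AStar as a reference on
 * randomly generated grids, and throws if the two disagree on either path
 * existence or path length.
 */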
private static void testAgainstReferenceAlgorithm() {
AnyAnglePathfinding.setDefaultAlgoFunction();
AlgoFunction currentAlgo = AnyAnglePathfinding.setDefaultAlgoFunction();
AlgoFunction referenceAlgo = AStar::new;
Random seedRand = new Random(3);
int initial = seedRand.nextInt();
for (int i=0; i<500000; i++) {
int sizeX = seedRand.nextInt(70) + 10;
int sizeY = seedRand.nextInt(70) + 10;
int seed = i+initial;
int ratio = seedRand.nextInt(40) + 5;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnlyOld(seed, sizeX, sizeY, ratio);
int[][] path = Utility.generatePath(referenceAlgo, gridGraph, sx, sy, ex, ey);
double referencePathLength = Utility.computePathLength(gridGraph, path);
boolean referenceValid = (referencePathLength > 0.00001f);
path = Utility.generatePath(currentAlgo, gridGraph, sx, sy, ex, ey);
double algoPathLength = Utility.computePathLength(gridGraph, path);
boolean algoValid = (algoPathLength > 0.00001f);
if (referenceValid != algoValid) {
System.out.println("============");
System.out.println("Validity Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Reference: " + referenceValid + " , Current: " + algoValid);
System.out.println("============");
throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
if (Math.abs(algoPathLength - referencePathLength) > 0.0001) {
System.out.println("============");
System.out.println("Path Length Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Reference: " + referencePathLength + " , Current: " + algoPathLength);
System.out.println("============");
throw new UnsupportedOperationException("DISCREPANCY!!");
}
if (i%1000 == 0)
System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
}
}
}
/**
* Tests random generated maps of various sizes and obstacle densities for
* the size of the visibility graphs.<br>
* The results are output to the file VisibilityGraphSizes.txt.
*/
private static void testVisibilityGraphSize() {
FileIO fileIO = FileIO.csv(AnyAnglePathfinding.PATH_ANALYSISDATA + "VisibilityGraphSizes.csv");
Random seedGenerator = new Random(9191);
fileIO.writeRow("Seed", "Size", "UnblockedRatio", "%Blocked", "VG Vertices", "VG Edges (Directed)", "SVG Vertices", "SVG Edges (Directed)");
for (int i=0; i<50; i++) {
int currentSize = 10 + i*10;
for (int r=0; r<3; r++) {
int currentRatio = (r == 0 ? 7 : (r == 1 ? 15 : 50));
int currentSeed = seedGenerator.nextInt();
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(currentSeed, currentSize, currentSize, currentRatio, 0, 0, currentSize, currentSize);
String seedString = currentSeed + "";
String sizeString = currentSize + "";
String ratioString = currentRatio + "";
String perBlockedString = gridGraph.getPercentageBlocked()*100f + "";
VisibilityGraph vGraph = new VisibilityGraph(gridGraph, 0, 0, currentSize, currentSize);
vGraph.initialise();
String verticesString = vGraph.size() + "";
String edgesString = vGraph.computeSumDegrees() + "";
SparseVisibilityGraph svGraph = new SparseVisibilityGraph(gridGraph);
svGraph.initialise(0, 0, currentSize, currentSize);
String sverticesString = svGraph.size() + "";
String sedgesString = svGraph.computeSumDegrees() + "";
fileIO.writeRow(seedString, sizeString, ratioString, perBlockedString, verticesString, edgesString, sverticesString, sedgesString);
fileIO.flush();
}
}
fileIO.close();
}
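/**
 * Empirically searches for the largest path-length ratio between
 * RecursiveStrictThetaStar and the optimal StrictVisibilityGraphAlgorithm on
 * randomly generated grids, and throws if the configured upper bound is
 * exceeded.
 */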
private static void findUpperBound() {
System.out.println("Strict Theta Star");
AlgoFunction testAlgo = (gridGraph, sx, sy, ex, ey) -> new RecursiveStrictThetaStar(gridGraph, sx, sy, ex, ey);
AlgoFunction optimalAlgo = (gridGraph, sx, sy, ex, ey) -> new StrictVisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
double upperBound = 1.5;
double maxRatio = 1;
int wins = 0;
int ties = 0;
Random seedRand = new Random(-2814121L);
long initial = seedRand.nextLong();
for (int i=0; i>=0; i++) {
int sizeX = seedRand.nextInt(30 + (int)(Math.sqrt(i))) + 1;
int sizeY = seedRand.nextInt(10 + (int)(Math.sqrt(i))) + 1;
sizeX = seedRand.nextInt(50) + 1;
sizeY = seedRand.nextInt(30) + 1;
long seed = i+initial;
int ratio = seedRand.nextInt(60) + 1;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
//GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
GridGraph gridGraph = DefaultGenerator.generateSeededTrueRandomGraphOnly(seed, sizeX, sizeY, ratio);
//gridGraph = GraphImporter.importGraphFromFile("custommaze4.txt");
//sx = 0; sy=0;ex=10+i;ey=2;
//if (ex > 22) break;
int[][] path = Utility.generatePath(testAlgo, gridGraph, sx, sy, ex, ey);
double testPathLength = Utility.computePathLength(gridGraph, path);
path = Utility.generatePath(optimalAlgo, gridGraph, sx, sy, ex, ey);
double optimalPathLength = Utility.computePathLength(gridGraph, path);
if (testPathLength > optimalPathLength*upperBound) {
System.out.println("============");
System.out.println("Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Test: " + testPathLength + " , Optimal: " + optimalPathLength);
System.out.println("Ratio: " + (testPathLength/optimalPathLength));
System.out.println("============");
System.out.println("WINS: " + wins + ", TIES: " + ties);
throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
//System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
double lengthRatio = (double)testPathLength/optimalPathLength;
if (lengthRatio > maxRatio) {
//System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Test: " + testPathLength + " , Optimal: " + optimalPathLength);
System.out.println("Ratio: " + (testPathLength/optimalPathLength));
maxRatio = lengthRatio;
System.out.println(maxRatio);
}
}
}
//System.out.println(maxRatio);
}
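/**
 * Compares BasicThetaStar against StrictThetaStar on randomly generated
 * grids, keeping win/tie/loss counts and reporting every case where the
 * strict variant returns a longer path.
 */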
private static void findStrictThetaStarIssues() {
AlgoFunction basicThetaStar = (gridGraph, sx, sy, ex, ey) -> new BasicThetaStar(gridGraph, sx, sy, ex, ey);
AlgoFunction strictThetaStar = (gridGraph, sx, sy, ex, ey) -> new StrictThetaStar(gridGraph, sx, sy, ex, ey);
// AlgoFunction basicThetaStar = (gridGraph, sx, sy, ex, ey) -> RecursiveStrictThetaStar.setBuffer(gridGraph, sx, sy, ex, ey, 0.4f);
// AlgoFunction strictThetaStar = (gridGraph, sx, sy, ex, ey) -> RecursiveStrictThetaStar.setBuffer(gridGraph, sx, sy, ex, ey, 0.2f);
// AlgoFunction basicThetaStar = AnyAngleSubgoalGraphsAlgorithm::new;
// AlgoFunction strictThetaStar = RecursiveStrictAnyAngleSubgoalGraphsAlgorithm::new;
double sumBasic = 0;
double sumStrict = 0;
int wins = 0;
int ties = 0;
int losses = 0;
Random seedRand = new Random(-4418533);
int initial = seedRand.nextInt();
for (int i=0; i<500000; i++) {
int sizeX = seedRand.nextInt(60) + 8;
int sizeY = seedRand.nextInt(60) + 8;
int seed = i+initial;
int ratio = seedRand.nextInt(40) + 5;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
int[][] path = Utility.generatePath(basicThetaStar, gridGraph, sx, sy, ex, ey);
double basicPathLength = Utility.computePathLength(gridGraph, path);
path = Utility.generatePath(strictThetaStar, gridGraph, sx, sy, ex, ey);
double strictPathLength = Utility.computePathLength(gridGraph, path);
sumBasic += basicPathLength;
sumStrict += strictPathLength;
if (basicPathLength < strictPathLength-0.01f) {
losses += 1;
System.out.println("============");
System.out.println("Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Basic: " + basicPathLength + " , Strict: " + strictPathLength);
System.out.println("============");
System.out.println("WINS: " + wins + ", TIES: " + ties + ", LOSSES: " + losses);
System.out.println("BASIC: " + sumBasic + ", STRICT: " + sumStrict);
System.out.println("Result: " + (sumBasic - sumStrict)/ (wins+losses+ties));
//throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
//System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
if (strictPathLength < basicPathLength - 0.01f) {
wins += 1;
} else {
ties += 1;
}
}
}
}
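/**
 * Checks that the test algorithm (currently Anya) returns paths of the same
 * length as the reference algorithm (currently StrictVisibilityGraphAlgorithmV2)
 * on randomly generated grids, and throws on the first discrepancy.
 */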
private static void testAlgorithmOptimality() {
//AlgoFunction rVGA = (gridGraph, sx, sy, ex, ey) -> new RestrictedVisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
//AlgoFunction rVGA = (gridGraph, sx, sy, ex, ey) -> new VisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
//AlgoFunction rVGA = (gridGraph, sx, sy, ex, ey) -> new StrictVisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
AlgoFunction testAlgo = Anya::new;
AlgoFunction refAlgo = StrictVisibilityGraphAlgorithmV2::new;
//AlgoFunction VGA = (gridGraph, sx, sy, ex, ey) -> VisibilityGraphAlgorithm.graphReuseNoHeuristic(gridGraph, sx, sy, ex, ey);
//printSeed = false; // keep this commented out.
Random seedRand = new Random(-2059321351);
int initial = seedRand.nextInt();
for (int i=0; i<50000000; i++) {
int sizeX = seedRand.nextInt(300) + 10;
int sizeY = seedRand.nextInt(300) + 10;
int seed = i+initial;
int ratio = seedRand.nextInt(50) + 10;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
double restPathLength = 0, normalPathLength = 0;
try {
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
//for (int iii=0;iii<100;++iii) Utility.generatePath(testAlgo, gridGraph, seedRand.nextInt(sizeX+1),seedRand.nextInt(sizeY+1),seedRand.nextInt(sizeX+1),seedRand.nextInt(sizeY+1));
int[][] path = Utility.generatePath(testAlgo, gridGraph, sx, sy, ex, ey);
path = Utility.removeDuplicatesInPath(path);
restPathLength = Utility.computePathLength(gridGraph, path);
path = Utility.generatePath(refAlgo, gridGraph, sx, sy, ex, ey);
path = Utility.removeDuplicatesInPath(path);
normalPathLength = Utility.computePathLength(gridGraph, path);
}catch (Exception e) {
e.printStackTrace();
System.out.println("EXCEPTION OCCURRED!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
throw new UnsupportedOperationException("DISCREPANCY!!");
}
if (Math.abs(restPathLength - normalPathLength) > 0.000001f) {
System.out.println("============");
System.out.println("Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Actual: " + restPathLength + " , Expected: " + normalPathLength);
System.out.println(restPathLength / normalPathLength);
System.out.println("============");
throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
if (i%10000 == 9999) {
System.out.println("Count: " + (i+1));
System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Actual: " + restPathLength + " , Expected: " + normalPathLength);
}
}
}
}
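/**
 * Returns true iff the path found by the given algorithm from (sx,sy) to
 * (ex,ey) is taut.
 */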
private static boolean testTautness(GridGraph gridGraph, AlgoFunction algo, int sx, int sy, int ex, int ey) {
int[][] path = Utility.generatePath(algo, gridGraph, sx, sy, ex, ey);
path = Utility.removeDuplicatesInPath(path);
return Utility.isPathTaut(gridGraph, path);
}
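/**
 * Returns true iff the given algorithm finds a path from (sx,sy) to (ex,ey).
 */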
private static boolean hasSolution(GridGraph gridGraph, AlgoFunction algo, int sx, int sy, int ex, int ey) {
int[][] path = Utility.generatePath(algo, gridGraph, sx, sy, ex, ey);
return path.length > 1;
}
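/**
 * Measures, over a fixed set of stored mazes, the fraction of solvable
 * start-goal pairs for which each of RecursiveStrictThetaStar,
 * StrictThetaStar and BasicThetaStar produces a taut path.
 */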
private static void countTautPaths() {
int nTaut1 = 0; int nTaut2 = 0; int nTaut3 = 0;
AlgoFunction hasPathChecker = JumpPointSearch::new;
AlgoFunction algo3 = BasicThetaStar::new;
AlgoFunction algo2 = StrictThetaStar::new;
AlgoFunction algo1 = RecursiveStrictThetaStar::new;
//printSeed = false; // keep this commented out.
int pathsPerGraph = 100;
int nIterations = 100000;
int nPaths = 0;
String[] maps = {
"obst10_random512-10-0",
"obst10_random512-10-1",
"obst10_random512-10-2",
"obst10_random512-10-3",
"obst10_random512-10-4",
"obst10_random512-10-5",
"obst10_random512-10-6",
"obst10_random512-10-7",
"obst10_random512-10-8",
"obst10_random512-10-9",
"obst40_random512-40-0",
"obst40_random512-40-1",
"obst40_random512-40-2",
"obst40_random512-40-3",
"obst40_random512-40-4",
"obst40_random512-40-5",
"obst40_random512-40-6",
"obst40_random512-40-7",
"obst40_random512-40-8",
"obst40_random512-40-9"
};
nIterations = maps.length;
System.out.println(maps.length);
Random seedRand = new Random(-14);
int initial = seedRand.nextInt();
for (int i=0;i<nIterations;++i) {
String map = maps[i];
System.out.println("Map: " + map);
GridGraph gridGraph = GraphImporter.loadStoredMaze(map);
int sizeX = gridGraph.sizeX;
int sizeY = gridGraph.sizeY;
/*int sizeX = seedRand.nextInt(20) + 300;
int sizeY = seedRand.nextInt(20) + 300;
int seed = i+initial;
int ratio = seedRand.nextInt(30) + 5;
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
System.out.println("Ratio: " + ratio);*/
for (int j=0;j<pathsPerGraph;++j) {
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
if (hasSolution(gridGraph, hasPathChecker, sx, sy, ex, ey)) {
if (testTautness(gridGraph, algo1, sx,sy,ex,ey)) nTaut1++;
if (testTautness(gridGraph, algo2, sx,sy,ex,ey)) nTaut2++;
if (testTautness(gridGraph, algo3, sx,sy,ex,ey)) nTaut3++;
nPaths++;
} else {
j--;
}
}
System.out.println("Total = " + nPaths);
System.out.println("1: " + ((float)nTaut1/nPaths));
System.out.println("2: " + ((float)nTaut2/nPaths));
System.out.println("3: " + ((float)nTaut3/nPaths));
}
}
}
| src/main/Experiment.java | package main;
import java.awt.Color;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import algorithms.AStar;
import algorithms.Anya;
import algorithms.BasicThetaStar;
import algorithms.JumpPointSearch;
import algorithms.StrictVisibilityGraphAlgorithm;
import algorithms.StrictVisibilityGraphAlgorithmV2;
import algorithms.datatypes.Point;
import algorithms.datatypes.SnapshotItem;
import algorithms.sparsevgs.LineOfSightScanner;
import algorithms.sparsevgs.SparseVisibilityGraph;
import algorithms.strictthetastar.RecursiveStrictThetaStar;
import algorithms.strictthetastar.StrictThetaStar;
import algorithms.visibilitygraph.VisibilityGraph;
import draw.DrawCanvas;
import draw.GridLineSet;
import draw.GridObjects;
import draw.GridPointSet;
import grid.GridAndGoals;
import grid.GridGraph;
import grid.ReachableNodes;
import grid.StartGoalPoints;
import main.graphgeneration.DefaultGenerator;
import main.utility.Utility;
import uiandio.FileIO;
import uiandio.GraphImporter;
public class Experiment {
public static void run() {
// testVisibilityGraphSize();
// testAbilityToFindGoal();
// findStrictThetaStarIssues();
// findUpperBound();
// testAlgorithmOptimality();
//testAgainstReferenceAlgorithm();
//countTautPaths();
// other();
testLOSScan();
}
/**
* Custom code for experiments.
*/
public static void other() {
// This is how to generate test data for the grid. (Use the VisibilityGraph algorithm to generate optimal path lengths)
// ArrayList<Point> points = ReachableNodes.computeReachable(gridGraph, 5, 5);
// System.out.println(points.size());
//
// generateRandomTestDataAndPrint(gridGraph);
// This is how to conduct a running time / path length test for the algorithm:
// TestResult test1 = testAlgorithm(gridGraph, sx, sy, ex, ey, 1, 1);
// System.out.println(test1);
// TestResult test2 = testAlgorithm(gridGraph, sx, sy, ex, ey, 30, 25);
// System.out.println(test2);
}
/**
* Generates and prints out random test data for the gridGraph in question. <br>
* Note: the algorithm used is the one specified in the algoFunction.
* Use setDefaultAlgoFunction to choose the algorithm.
* @param gridGraph the grid to test.
*/
private static void generateRandomTestDataAndPrint(GridGraph gridGraph) {
AlgoFunction algo = AnyAnglePathfinding.setDefaultAlgoFunction();
ArrayList<Point> points = ReachableNodes.computeReachable(gridGraph, 5, 5);
LinkedList<Integer> startX = new LinkedList<>();
LinkedList<Integer> startY = new LinkedList<>();
LinkedList<Integer> endX = new LinkedList<>();
LinkedList<Integer> endY = new LinkedList<>();
LinkedList<Double> length = new LinkedList<>();
int size = points.size();
System.out.println("Points: " + size);
for (int i=0; i<100; i++) {
Random random = new Random();
int first = random.nextInt(size);
int last = random.nextInt(size-1);
if (last == first) last = size-1; // prevent first and last from being the same
Point s = points.get(first);
Point f = points.get(last);
int[][] path = Utility.generatePath(algo, gridGraph, s.x, s.y, f.x, f.y);
if (path.length >= 2) {
double len = Utility.computePathLength(gridGraph, path);
startX.offer(s.x);
startY.offer(s.y);
endX.offer(f.x);
endY.offer(f.y);
length.offer(len);
}
if (i%10 == 0) System.out.println("Computed: " + i);
}
System.out.println(startX);
System.out.println(startY);
System.out.println(endX);
System.out.println(endY);
System.out.println(length);
}
/**
* Returns true iff there is a path from the start to the end. Uses the current algorithm to check.<br>
* Use setDefaultAlgoFunction to choose the algorithm.
*/
private static boolean hasSolution(AlgoFunction algo, GridGraph gridGraph, StartGoalPoints p) {
int[][] path = Utility.generatePath(algo, gridGraph, p.sx, p.sy, p.ex, p.ey);
return path.length > 1;
}
private static void testLOSScan() {
GridAndGoals gridAndGoals = AnyAnglePathfinding.loadMaze();
GridGraph gridGraph = gridAndGoals.gridGraph;
ArrayList<GridObjects> gridObjectsList = new ArrayList<>();
GridLineSet gridLineSet = new GridLineSet();
GridPointSet gridPointSet = new GridPointSet();
int dx, dy;
Random rand = new Random();
{
int sx = rand.nextInt(gridGraph.sizeX+1);
int sy = rand.nextInt(gridGraph.sizeY+1);
sx = gridAndGoals.startGoalPoints.sx;
sy = gridAndGoals.startGoalPoints.sy;
sx = 24; sy = 24;
dx = -1; dy = 2;
LineOfSightScanner losScanner = new LineOfSightScanner(gridGraph);
try {
// Expected running time: 500x500, blocked ratio 25 ==> 0.07ms to 0.1ms per iteration.
int iterations = 30000;
long start = System.nanoTime();
for (int i=0;i<iterations;++i) {
//losScanner.computeAllVisibleTautSuccessors(rand.nextInt(gridGraph.sizeX+1), rand.nextInt(gridGraph.sizeY+1));
//losScanner.clearSnapshots();
}
long end = System.nanoTime();
double totalTime = (end-start)/1000000.; // convert to milliseconds
System.out.println("Total Time: " + totalTime);
System.out.println("Per iteration time: " + (totalTime/iterations));
//losScanner.computeAllVisibleTwoWayTautSuccessors(sx, sy);
//losScanner.computeAllVisibleTautSuccessors(sx, sy);
losScanner.computeAllVisibleIncrementalTautSuccessors(sx, sy, dx, dy);
} catch (Exception e) {
e.printStackTrace();
}
for (int i=0;i<losScanner.nSuccessors;++i) {
int x = losScanner.successorsX[i];
int y = losScanner.successorsY[i];
gridLineSet.addLine(sx, sy, x,y, Color.GREEN);
gridPointSet.addPoint(x, y, Color.RED);
}
//gridLineSet = generateRandomTestLines(gridGraph, 10);
gridPointSet.addPoint(sx, sy, Color.BLUE);
gridObjectsList.add(new GridObjects(gridLineSet, gridPointSet));
for (List<SnapshotItem> l : LineOfSightScanner.snapshotList) {
gridObjectsList.add(GridObjects.create(l));
}
}
DrawCanvas drawCanvas = new DrawCanvas(gridGraph, gridLineSet);
Visualisation.setupMainFrame(drawCanvas, gridObjectsList);
}
/**
* Generates random lines on the map. Used to test whether the line-of-sight
* algorithm is correct. Returns a gridLineSet containing all the test lines.
*/
private static GridLineSet generateRandomTestLines(GridGraph gridGraph,
int amount) {
GridLineSet gridLineSet = new GridLineSet();
Random rand = new Random();
for (int i=0; i<amount; i++) {
int x1 = rand.nextInt(gridGraph.sizeX);
int y1 = rand.nextInt(gridGraph.sizeY);
int x2 = rand.nextInt(gridGraph.sizeX);
int y2 = rand.nextInt(gridGraph.sizeY);
Experiment.testAndAddLine(x1,y1,x2,y2,gridGraph,gridLineSet);
}
return gridLineSet;
}
/**
* Tests a set of coordinates for line-of-sight. Adds a green line to the
* gridLineSet if there is line-of-sight between (x1,y1) and (x2,y2).
* Adds a red line otherwise.
*/
private static void testAndAddLine(int x1, int y1, int x2, int y2,
GridGraph gridGraph, GridLineSet gridLineSet) {
if (gridGraph.lineOfSight(x1, y1, x2, y2)) {
gridLineSet.addLine(x1, y1, x2, y2, Color.GREEN);
} else {
gridLineSet.addLine(x1, y1, x2, y2, Color.RED);
}
}
private static void testAgainstReferenceAlgorithm() {
AnyAnglePathfinding.setDefaultAlgoFunction();
AlgoFunction currentAlgo = AnyAnglePathfinding.setDefaultAlgoFunction();
AlgoFunction referenceAlgo = AStar::new;
Random seedRand = new Random(3);
int initial = seedRand.nextInt();
for (int i=0; i<500000; i++) {
int sizeX = seedRand.nextInt(70) + 10;
int sizeY = seedRand.nextInt(70) + 10;
int seed = i+initial;
int ratio = seedRand.nextInt(40) + 5;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnlyOld(seed, sizeX, sizeY, ratio);
int[][] path = Utility.generatePath(referenceAlgo, gridGraph, sx, sy, ex, ey);
double referencePathLength = Utility.computePathLength(gridGraph, path);
boolean referenceValid = (referencePathLength > 0.00001f);
path = Utility.generatePath(currentAlgo, gridGraph, sx, sy, ex, ey);
double algoPathLength = Utility.computePathLength(gridGraph, path);
boolean algoValid = (algoPathLength > 0.00001f);
if (referenceValid != algoValid) {
System.out.println("============");
System.out.println("Validity Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Reference: " + referenceValid + " , Current: " + algoValid);
System.out.println("============");
throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
if (Math.abs(algoPathLength - referencePathLength) > 0.0001) {
System.out.println("============");
System.out.println("Path Length Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Reference: " + referencePathLength + " , Current: " + algoPathLength);
System.out.println("============");
throw new UnsupportedOperationException("DISCREPANCY!!");
}
if (i%1000 == 0)
System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
}
}
}
/**
* Tests random generated maps of various sizes and obstacle densities for
* the size of the visibility graphs.<br>
* The results are output to the file VisibilityGraphSizes.txt.
*/
private static void testVisibilityGraphSize() {
FileIO fileIO = FileIO.csv(AnyAnglePathfinding.PATH_ANALYSISDATA + "VisibilityGraphSizes.csv");
Random seedGenerator = new Random(9191);
fileIO.writeRow("Seed", "Size", "UnblockedRatio", "%Blocked", "VG Vertices", "VG Edges (Directed)", "SVG Vertices", "SVG Edges (Directed)");
for (int i=0; i<50; i++) {
int currentSize = 10 + i*10;
for (int r=0; r<3; r++) {
int currentRatio = (r == 0 ? 7 : (r == 1 ? 15 : 50));
int currentSeed = seedGenerator.nextInt();
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(currentSeed, currentSize, currentSize, currentRatio, 0, 0, currentSize, currentSize);
String seedString = currentSeed + "";
String sizeString = currentSize + "";
String ratioString = currentRatio + "";
String perBlockedString = gridGraph.getPercentageBlocked()*100f + "";
VisibilityGraph vGraph = new VisibilityGraph(gridGraph, 0, 0, currentSize, currentSize);
vGraph.initialise();
String verticesString = vGraph.size() + "";
String edgesString = vGraph.computeSumDegrees() + "";
SparseVisibilityGraph svGraph = new SparseVisibilityGraph(gridGraph);
svGraph.initialise(0, 0, currentSize, currentSize);
String sverticesString = svGraph.size() + "";
String sedgesString = svGraph.computeSumDegrees() + "";
fileIO.writeRow(seedString, sizeString, ratioString, perBlockedString, verticesString, edgesString, sverticesString, sedgesString);
fileIO.flush();
}
}
fileIO.close();
}
private static void findUpperBound() {
System.out.println("Strict Theta Star");
AlgoFunction testAlgo = (gridGraph, sx, sy, ex, ey) -> new RecursiveStrictThetaStar(gridGraph, sx, sy, ex, ey);
AlgoFunction optimalAlgo = (gridGraph, sx, sy, ex, ey) -> new StrictVisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
double upperBound = 1.5;
double maxRatio = 1;
int wins = 0;
int ties = 0;
Random seedRand = new Random(-2814121L);
long initial = seedRand.nextLong();
for (int i=0; i>=0; i++) {
int sizeX = seedRand.nextInt(30 + (int)(Math.sqrt(i))) + 1;
int sizeY = seedRand.nextInt(10 + (int)(Math.sqrt(i))) + 1;
sizeX = seedRand.nextInt(50) + 1;
sizeY = seedRand.nextInt(30) + 1;
long seed = i+initial;
int ratio = seedRand.nextInt(60) + 1;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
//GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
GridGraph gridGraph = DefaultGenerator.generateSeededTrueRandomGraphOnly(seed, sizeX, sizeY, ratio);
//gridGraph = GraphImporter.importGraphFromFile("custommaze4.txt");
//sx = 0; sy=0;ex=10+i;ey=2;
//if (ex > 22) break;
int[][] path = Utility.generatePath(testAlgo, gridGraph, sx, sy, ex, ey);
double testPathLength = Utility.computePathLength(gridGraph, path);
path = Utility.generatePath(optimalAlgo, gridGraph, sx, sy, ex, ey);
double optimalPathLength = Utility.computePathLength(gridGraph, path);
if (testPathLength > optimalPathLength*upperBound) {
System.out.println("============");
System.out.println("Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Test: " + testPathLength + " , Optimal: " + optimalPathLength);
System.out.println("Ratio: " + (testPathLength/optimalPathLength));
System.out.println("============");
System.out.println("WINS: " + wins + ", TIES: " + ties);
throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
//System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
double lengthRatio = (double)testPathLength/optimalPathLength;
if (lengthRatio > maxRatio) {
//System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Test: " + testPathLength + " , Optimal: " + optimalPathLength);
System.out.println("Ratio: " + (testPathLength/optimalPathLength));
maxRatio = lengthRatio;
System.out.println(maxRatio);
}
}
}
//System.out.println(maxRatio);
}
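    /**
     * Compares path lengths of BasicThetaStar and StrictThetaStar on randomly generated grids,
     * counting wins, ties and losses for the strict variant and printing the details whenever
     * the basic algorithm finds a strictly shorter path.
     */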
private static void findStrictThetaStarIssues() {
        AlgoFunction basicThetaStar = (gridGraph, sx, sy, ex, ey) -> new BasicThetaStar(gridGraph, sx, sy, ex, ey);
AlgoFunction strictThetaStar = (gridGraph, sx, sy, ex, ey) -> new StrictThetaStar(gridGraph, sx, sy, ex, ey);
// AlgoFunction basicThetaStar = (gridGraph, sx, sy, ex, ey) -> RecursiveStrictThetaStar.setBuffer(gridGraph, sx, sy, ex, ey, 0.4f);
// AlgoFunction strictThetaStar = (gridGraph, sx, sy, ex, ey) -> RecursiveStrictThetaStar.setBuffer(gridGraph, sx, sy, ex, ey, 0.2f);
// AlgoFunction basicThetaStar = AnyAngleSubgoalGraphsAlgorithm::new;
// AlgoFunction strictThetaStar = RecursiveStrictAnyAngleSubgoalGraphsAlgorithm::new;
double sumBasic = 0;
double sumStrict = 0;
int wins = 0;
int ties = 0;
int losses = 0;
Random seedRand = new Random(-4418533);
int initial = seedRand.nextInt();
for (int i=0; i<500000; i++) {
int sizeX = seedRand.nextInt(60) + 8;
int sizeY = seedRand.nextInt(60) + 8;
int seed = i+initial;
int ratio = seedRand.nextInt(40) + 5;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
int[][] path = Utility.generatePath(basicThetaStar, gridGraph, sx, sy, ex, ey);
double basicPathLength = Utility.computePathLength(gridGraph, path);
path = Utility.generatePath(strictThetaStar, gridGraph, sx, sy, ex, ey);
double strictPathLength = Utility.computePathLength(gridGraph, path);
sumBasic += basicPathLength;
sumStrict += strictPathLength;
if (basicPathLength < strictPathLength-0.01f) {
losses += 1;
System.out.println("============");
System.out.println("Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Basic: " + basicPathLength + " , Strict: " + strictPathLength);
System.out.println("============");
System.out.println("WINS: " + wins + ", TIES: " + ties + ", LOSSES: " + losses);
System.out.println("BASIC: " + sumBasic + ", STRICT: " + sumStrict);
System.out.println("Result: " + (sumBasic - sumStrict)/ (wins+losses+ties));
//throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
//System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
if (strictPathLength < basicPathLength - 0.01f) {
wins += 1;
} else {
ties += 1;
}
}
}
}
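    /**
     * Checks that the test algorithm (Anya) produces paths of the same length as the reference
     * algorithm (StrictVisibilityGraphAlgorithmV2) on randomly generated grids.
     * Throws on any length discrepancy or exception; prints progress every 10000 iterations.
     */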
private static void testAlgorithmOptimality() {
//AlgoFunction rVGA = (gridGraph, sx, sy, ex, ey) -> new RestrictedVisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
//AlgoFunction rVGA = (gridGraph, sx, sy, ex, ey) -> new VisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
//AlgoFunction rVGA = (gridGraph, sx, sy, ex, ey) -> new StrictVisibilityGraphAlgorithm(gridGraph, sx, sy, ex, ey);
AlgoFunction testAlgo = Anya::new;
AlgoFunction refAlgo = StrictVisibilityGraphAlgorithmV2::new;
//AlgoFunction VGA = (gridGraph, sx, sy, ex, ey) -> VisibilityGraphAlgorithm.graphReuseNoHeuristic(gridGraph, sx, sy, ex, ey);
//printSeed = false; // keep this commented out.
Random seedRand = new Random(-2059321351);
int initial = seedRand.nextInt();
for (int i=0; i<50000000; i++) {
int sizeX = seedRand.nextInt(300) + 10;
int sizeY = seedRand.nextInt(300) + 10;
int seed = i+initial;
int ratio = seedRand.nextInt(50) + 10;
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
double restPathLength = 0, normalPathLength = 0;
try {
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
//for (int iii=0;iii<100;++iii) Utility.generatePath(testAlgo, gridGraph, seedRand.nextInt(sizeX+1),seedRand.nextInt(sizeY+1),seedRand.nextInt(sizeX+1),seedRand.nextInt(sizeY+1));
int[][] path = Utility.generatePath(testAlgo, gridGraph, sx, sy, ex, ey);
path = Utility.removeDuplicatesInPath(path);
restPathLength = Utility.computePathLength(gridGraph, path);
path = Utility.generatePath(refAlgo, gridGraph, sx, sy, ex, ey);
path = Utility.removeDuplicatesInPath(path);
normalPathLength = Utility.computePathLength(gridGraph, path);
            } catch (Exception e) {
e.printStackTrace();
System.out.println("EXCEPTION OCCURRED!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
throw new UnsupportedOperationException("DISCREPANCY!!");
}
if (Math.abs(restPathLength - normalPathLength) > 0.000001f) {
System.out.println("============");
System.out.println("Discrepancy Discovered!");
System.out.println("Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Start = " + sx + "," + sy + " End = " + ex + "," + ey);
System.out.println("Actual: " + restPathLength + " , Expected: " + normalPathLength);
System.out.println(restPathLength / normalPathLength);
System.out.println("============");
throw new UnsupportedOperationException("DISCREPANCY!!");
} else {
if (i%10000 == 9999) {
System.out.println("Count: " + (i+1));
System.out.println("OK: Seed = " + seed +" , Ratio = " + ratio + " , Size: x=" + sizeX + " y=" + sizeY);
System.out.println("Actual: " + restPathLength + " , Expected: " + normalPathLength);
}
}
}
}
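    /**
     * Returns true if the path generated by the given algorithm from (sx,sy) to (ex,ey) is taut.
     */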
private static boolean testTautness(GridGraph gridGraph, AlgoFunction algo, int sx, int sy, int ex, int ey) {
int[][] path = Utility.generatePath(algo, gridGraph, sx, sy, ex, ey);
path = Utility.removeDuplicatesInPath(path);
return Utility.isPathTaut(gridGraph, path);
}
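    /**
     * Returns true if the given algorithm finds a path of more than one point from (sx,sy) to (ex,ey).
     */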
private static boolean hasSolution(GridGraph gridGraph, AlgoFunction algo, int sx, int sy, int ex, int ey) {
int[][] path = Utility.generatePath(algo, gridGraph, sx, sy, ex, ey);
return path.length > 1;
}
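    /**
     * Measures the fraction of taut paths produced by RecursiveStrictThetaStar, StrictThetaStar and
     * BasicThetaStar on the listed benchmark maps. For each map, random solvable start/end pairs are
     * sampled (Jump Point Search is used to test solvability) and running statistics are printed.
     */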
private static void countTautPaths() {
        int nTaut1 = 0, nTaut2 = 0, nTaut3 = 0;
AlgoFunction hasPathChecker = JumpPointSearch::new;
AlgoFunction algo3 = BasicThetaStar::new;
AlgoFunction algo2 = StrictThetaStar::new;
AlgoFunction algo1 = RecursiveStrictThetaStar::new;
//printSeed = false; // keep this commented out.
int pathsPerGraph = 100;
int nIterations = 100000;
int nPaths = 0;
String[] maps = {
"obst10_random512-10-0",
"obst10_random512-10-1",
"obst10_random512-10-2",
"obst10_random512-10-3",
"obst10_random512-10-4",
"obst10_random512-10-5",
"obst10_random512-10-6",
"obst10_random512-10-7",
"obst10_random512-10-8",
"obst10_random512-10-9",
"obst40_random512-40-0",
"obst40_random512-40-1",
"obst40_random512-40-2",
"obst40_random512-40-3",
"obst40_random512-40-4",
"obst40_random512-40-5",
"obst40_random512-40-6",
"obst40_random512-40-7",
"obst40_random512-40-8",
"obst40_random512-40-9"
};
nIterations = maps.length;
System.out.println(maps.length);
Random seedRand = new Random(-14);
int initial = seedRand.nextInt();
for (int i=0;i<nIterations;++i) {
String map = maps[i];
System.out.println("Map: " + map);
GridGraph gridGraph = GraphImporter.loadStoredMaze(map);
int sizeX = gridGraph.sizeX;
int sizeY = gridGraph.sizeY;
/*int sizeX = seedRand.nextInt(20) + 300;
int sizeY = seedRand.nextInt(20) + 300;
int seed = i+initial;
int ratio = seedRand.nextInt(30) + 5;
GridGraph gridGraph = DefaultGenerator.generateSeededGraphOnly(seed, sizeX, sizeY, ratio);
System.out.println("Ratio: " + ratio);*/
for (int j=0;j<pathsPerGraph;++j) {
int max = (sizeX+1)*(sizeY+1);
int p1 = seedRand.nextInt(max);
int p2 = seedRand.nextInt(max-1);
if (p2 == p1) {
p2 = max-1;
}
int sx = p1%(sizeX+1);
int sy = p1/(sizeX+1);
int ex = p2%(sizeX+1);
int ey = p2/(sizeX+1);
if (hasSolution(gridGraph, hasPathChecker, sx, sy, ex, ey)) {
if (testTautness(gridGraph, algo1, sx,sy,ex,ey)) nTaut1++;
if (testTautness(gridGraph, algo2, sx,sy,ex,ey)) nTaut2++;
if (testTautness(gridGraph, algo3, sx,sy,ex,ey)) nTaut3++;
nPaths++;
} else {
j--;
}
}
System.out.println("Total = " + nPaths);
System.out.println("1: " + ((float)nTaut1/nPaths));
System.out.println("2: " + ((float)nTaut2/nPaths));
System.out.println("3: " + ((float)nTaut3/nPaths));
}
}
}
 | Add test for computeAllVisibleSuccessors in Experiment.java
| src/main/Experiment.java | Add test for computeAllVisibibleSuccessors in Experiment.java | <ide><path>rc/main/Experiment.java
<ide> System.out.println("Per iteration time: " + (totalTime/iterations));
<ide>
<ide> //losScanner.computeAllVisibleTwoWayTautSuccessors(sx, sy);
<add> losScanner.computeAllVisibleSuccessors(sx, sy);
<ide> //losScanner.computeAllVisibleTautSuccessors(sx, sy);
<del> losScanner.computeAllVisibleIncrementalTautSuccessors(sx, sy, dx, dy);
<add> //losScanner.computeAllVisibleIncrementalTautSuccessors(sx, sy, dx, dy);
<ide> } catch (Exception e) {
<ide> e.printStackTrace();
<ide> } |
|
JavaScript | bsd-2-clause | d1ed996204bedf61d8fa4e88ce242b43439e3759 | 0 | erpframework/oboe.js,Mosoc/oboe.js,wbh5/oboe.js,ethanresnick/oboe.js |
/*
   Tests that calling the public api gets through correctly to the wiring.
   streamingXhr is a stub so no actual calls are made.
Technically this tests some of instanceController.js as well as publicApi.js but the tests were
written before the logic was split into two.
*/
describe("public api", function(){
"use strict";
describe("propagates through to wiring function", function(){
beforeEach(function() {
spyOn(window, 'wire');
});
it('exports a usable function for GETs', function(){
oboe('http://example.com/oboez')
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez'
)
})
describe('get', function(){
it('works via arguments', function(){
oboe('http://example.com/oboez')
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez'
)
})
it('works via options object', function(){
oboe({url: 'http://example.com/oboez'})
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez'
)
})
         it('propagates headers', function(){
var headers = {'X-HEADER-1':'value1', 'X-HEADER-2':'value2'};
oboe({url: 'http://example.com/oboez',
method:'GET',
headers:headers})
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez',
undefined,
headers
)
})
});
describe('delete', function(){
it('works via options object', function(){
oboe({url: 'http://example.com/oboez',
method: 'DELETE'})
expect(wire).toHaveBeenCalledLike(
'DELETE',
'http://example.com/oboez'
)
})
});
describe('post', function(){
it('can post an object', function(){
oboe({ method:'POST',
url:'http://example.com/oboez',
body:[1,2,3,4,5]
})
expect(wire).toHaveBeenCalledLike(
'POST',
'http://example.com/oboez',
[1,2,3,4,5]
)
})
it('can post a string', function(){
oboe({ method:'POST',
url:'http://example.com/oboez',
body:'my_data'
})
expect(wire).toHaveBeenCalledLike(
'POST',
'http://example.com/oboez',
'my_data'
)
})
});
describe('put', function(){
it('can put a string', function(){
oboe({ method:'PUT',
url:'http://example.com/oboez',
'body':'my_data'})
expect(wire).toHaveBeenCalledLike(
'PUT',
'http://example.com/oboez',
'my_data'
)
})
});
describe('patch', function(){
it('can patch a string', function(){
oboe({url:'http://example.com/oboez',
body:'my_data',
method:'PATCH'});
expect(wire).toHaveBeenCalledLike(
'PATCH',
'http://example.com/oboez',
'my_data'
)
})
})
});
this.beforeEach(function(){
this.addMatchers({
/* Under Jasmine's toHaveBeenCalledLike, subject(foo, undefined)
is considered different from subject(foo). This is slightly
looser and considers those equal.
*/
toHaveBeenCalledLike:function(/*expectedArgs*/){
var expectedArgs = Array.prototype.slice.apply(arguments);
var actualCalls = this.actual.calls;
var equals = this.env.equals_.bind(this.env);
this.message = function() {
var invertedMessage = "Expected spy " + this.actual.identity + " not to have been called like " + jasmine.pp(expectedArgs) + " but it was.";
var positiveMessage = "";
if (this.actual.callCount === 0) {
positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but it was never called.";
} else {
positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but actual calls were " + jasmine.pp(this.actual.argsForCall).replace(/^\[ | \]$/g, '')
}
return [positiveMessage, invertedMessage];
};
return actualCalls.some(function( actualCall ){
var actualArgs = actualCall.args;
// check for one too many arguments given. But this is ok
// if the extra arg is undefined.
if( actualArgs[expectedArgs.length] != undefined ) {
return false;
}
return expectedArgs.every(function( expectedArg, index ){
return equals( actualArgs[index], expectedArg );
});
});
}
});
})
});
| test/specs/publicApi.unit.spec.js |
/*
   Tests that calling the public api gets through correctly to the wiring.
   streamingXhr is a stub so no actual calls are made.
Technically this tests some of instanceController.js as well as publicApi.js but the tests were
written before the logic was split into two.
*/
describe("public api", function(){
"use strict";
this.beforeEach(function(){
this.addMatchers({
/* Under Jasmine's toHaveBeenCalledLike, subject(foo, undefined)
is considered different from subject(foo). This is slightly
looser and considers those equal.
*/
toHaveBeenCalledLike:function(/*expectedArgs*/){
var expectedArgs = Array.prototype.slice.apply(arguments);
var actualCalls = this.actual.calls;
var equals = this.env.equals_.bind(this.env);
this.message = function() {
var invertedMessage = "Expected spy " + this.actual.identity + " not to have been called like " + jasmine.pp(expectedArgs) + " but it was.";
var positiveMessage = "";
if (this.actual.callCount === 0) {
positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but it was never called.";
} else {
positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but actual calls were " + jasmine.pp(this.actual.argsForCall).replace(/^\[ | \]$/g, '')
}
return [positiveMessage, invertedMessage];
};
return actualCalls.some(function( actualCall ){
var actualArgs = actualCall.args;
// check for one too many arguments given. But this is ok
// if the extra arg is undefined.
if( actualArgs[expectedArgs.length] != undefined ) {
return false;
}
return expectedArgs.every(function( expectedArg, index ){
return equals( actualArgs[index], expectedArg );
});
});
}
});
})
describe("propagates through to wiring function", function(){
beforeEach(function() {
spyOn(window, 'wire');
});
it('exports a usable function for GETs', function(){
oboe('http://example.com/oboez')
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez'
)
})
describe('get', function(){
it('works via arguments', function(){
oboe('http://example.com/oboez')
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez'
)
})
it('works via options object', function(){
oboe({url: 'http://example.com/oboez'})
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez'
)
})
         it('propagates headers', function(){
var headers = {'X-HEADER-1':'value1', 'X-HEADER-2':'value2'};
oboe({url: 'http://example.com/oboez',
method:'GET',
headers:headers})
expect(wire).toHaveBeenCalledLike(
'GET',
'http://example.com/oboez',
undefined,
headers
)
})
});
describe('delete', function(){
it('works via options object', function(){
oboe({url: 'http://example.com/oboez',
method: 'DELETE'})
expect(wire).toHaveBeenCalledLike(
'DELETE',
'http://example.com/oboez'
)
})
});
describe('post', function(){
it('can post an object', function(){
oboe({ method:'POST',
url:'http://example.com/oboez',
body:[1,2,3,4,5]
})
expect(wire).toHaveBeenCalledLike(
'POST',
'http://example.com/oboez',
[1,2,3,4,5]
)
})
it('can post a string', function(){
oboe({ method:'POST',
url:'http://example.com/oboez',
body:'my_data'
})
expect(wire).toHaveBeenCalledLike(
'POST',
'http://example.com/oboez',
'my_data'
)
})
});
describe('put', function(){
it('can put a string', function(){
oboe({ method:'PUT',
url:'http://example.com/oboez',
'body':'my_data'})
expect(wire).toHaveBeenCalledLike(
'PUT',
'http://example.com/oboez',
'my_data'
)
})
});
describe('patch', function(){
it('can patch a string', function(){
oboe({url:'http://example.com/oboez',
body:'my_data',
method:'PATCH'});
expect(wire).toHaveBeenCalledLike(
'PATCH',
'http://example.com/oboez',
'my_data'
)
})
})
});
});
| move beforeEach to end of spec file
| test/specs/publicApi.unit.spec.js | move beforeEach to end of spec file | <ide><path>est/specs/publicApi.unit.spec.js
<ide>
<ide> describe("public api", function(){
<ide> "use strict";
<del>
<del>
<del> this.beforeEach(function(){
<del>
<del> this.addMatchers({
<del> /* Under Jasmine's toHaveBeenCalledLike, subject(foo, undefined)
<del> is considered different from subject(foo). This is slightly
<del> looser and considers those equal.
<del> */
<del> toHaveBeenCalledLike:function(/*expectedArgs*/){
<del> var expectedArgs = Array.prototype.slice.apply(arguments);
<del> var actualCalls = this.actual.calls;
<del>
<del> var equals = this.env.equals_.bind(this.env);
<del>
<del> this.message = function() {
<del> var invertedMessage = "Expected spy " + this.actual.identity + " not to have been called like " + jasmine.pp(expectedArgs) + " but it was.";
<del> var positiveMessage = "";
<del> if (this.actual.callCount === 0) {
<del> positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but it was never called.";
<del> } else {
<del> positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but actual calls were " + jasmine.pp(this.actual.argsForCall).replace(/^\[ | \]$/g, '')
<del> }
<del> return [positiveMessage, invertedMessage];
<del> };
<del>
<del> return actualCalls.some(function( actualCall ){
<del>
<del> var actualArgs = actualCall.args;
<del>
<del> // check for one too many arguments given. But this is ok
<del> // if the extra arg is undefined.
<del> if( actualArgs[expectedArgs.length] != undefined ) {
<del>
<del> return false;
<del> }
<del>
<del> return expectedArgs.every(function( expectedArg, index ){
<del>
<del> return equals( actualArgs[index], expectedArg );
<del> });
<del>
<del> });
<del> }
<del> });
<del> })
<ide>
<ide> describe("propagates through to wiring function", function(){
<ide>
<ide> })
<ide>
<ide> });
<add>
<add> this.beforeEach(function(){
<add>
<add> this.addMatchers({
<add> /* Under Jasmine's toHaveBeenCalledLike, subject(foo, undefined)
<add> is considered different from subject(foo). This is slightly
<add> looser and considers those equal.
<add> */
<add> toHaveBeenCalledLike:function(/*expectedArgs*/){
<add> var expectedArgs = Array.prototype.slice.apply(arguments);
<add> var actualCalls = this.actual.calls;
<add>
<add> var equals = this.env.equals_.bind(this.env);
<add>
<add> this.message = function() {
<add> var invertedMessage = "Expected spy " + this.actual.identity + " not to have been called like " + jasmine.pp(expectedArgs) + " but it was.";
<add> var positiveMessage = "";
<add> if (this.actual.callCount === 0) {
<add> positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but it was never called.";
<add> } else {
<add> positiveMessage = "Expected spy " + this.actual.identity + " to have been called like " + jasmine.pp(expectedArgs) + " but actual calls were " + jasmine.pp(this.actual.argsForCall).replace(/^\[ | \]$/g, '')
<add> }
<add> return [positiveMessage, invertedMessage];
<add> };
<add>
<add> return actualCalls.some(function( actualCall ){
<add>
<add> var actualArgs = actualCall.args;
<add>
<add> // check for one too many arguments given. But this is ok
<add> // if the extra arg is undefined.
<add> if( actualArgs[expectedArgs.length] != undefined ) {
<add>
<add> return false;
<add> }
<add>
<add> return expectedArgs.every(function( expectedArg, index ){
<add>
<add> return equals( actualArgs[index], expectedArg );
<add> });
<add>
<add> });
<add> }
<add> });
<add> })
<ide>
<ide> });
<ide> |
|
JavaScript | apache-2.0 | cae804536785cac5d5a2b3b8e3b9d38c1e623148 | 0 | rogerpueyo/luci,openwrt/luci,tobiaswaldvogel/luci,tobiaswaldvogel/luci,rogerpueyo/luci,hnyman/luci,hnyman/luci,hnyman/luci,lbthomsen/openwrt-luci,rogerpueyo/luci,openwrt/luci,hnyman/luci,lbthomsen/openwrt-luci,openwrt/luci,hnyman/luci,openwrt/luci,rogerpueyo/luci,lbthomsen/openwrt-luci,rogerpueyo/luci,openwrt/luci,lbthomsen/openwrt-luci,openwrt/luci,tobiaswaldvogel/luci,hnyman/luci,hnyman/luci,rogerpueyo/luci,lbthomsen/openwrt-luci,lbthomsen/openwrt-luci,rogerpueyo/luci,openwrt/luci,tobiaswaldvogel/luci,tobiaswaldvogel/luci,tobiaswaldvogel/luci,tobiaswaldvogel/luci,lbthomsen/openwrt-luci,tobiaswaldvogel/luci,openwrt/luci,rogerpueyo/luci,hnyman/luci,lbthomsen/openwrt-luci | 'use strict';
'require fs';
'require ui';
'require rpc';
'require uci';
'require form';
'require network';
'require firewall';
'require tools.widgets as widgets';
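// Returns the number of pending configuration changes queued for the given wireless section.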
function count_changes(section_id) {
var changes = ui.changes.changes, n = 0;
if (!L.isObject(changes))
return n;
if (Array.isArray(changes.wireless))
for (var i = 0; i < changes.wireless.length; i++)
n += (changes.wireless[i][1] == section_id);
return n;
}
function render_radio_badge(radioDev) {
return E('span', { 'class': 'ifacebadge' }, [
E('img', { 'src': L.resource('icons/wifi%s.png').format(radioDev.isUp() ? '' : '_disabled') }),
' ',
radioDev.getName()
]);
}
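// Renders a signal-strength badge: picks an icon from the signal percentage and shows signal/noise in dBm.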
function render_signal_badge(signalPercent, signalValue, noiseValue, wrap) {
var icon, title, value;
if (signalPercent < 0)
icon = L.resource('icons/signal-none.png');
else if (signalPercent == 0)
icon = L.resource('icons/signal-0.png');
else if (signalPercent < 25)
icon = L.resource('icons/signal-0-25.png');
else if (signalPercent < 50)
icon = L.resource('icons/signal-25-50.png');
else if (signalPercent < 75)
icon = L.resource('icons/signal-50-75.png');
else
icon = L.resource('icons/signal-75-100.png');
if (signalValue != null && signalValue != 0 && noiseValue != null && noiseValue != 0) {
value = '%d/%d\xa0%s'.format(signalValue, noiseValue, _('dBm'));
title = '%s: %d %s / %s: %d %s / %s %d'.format(
_('Signal'), signalValue, _('dBm'),
_('Noise'), noiseValue, _('dBm'),
_('SNR'), signalValue - noiseValue);
}
else if (signalValue != null && signalValue != 0) {
value = '%d %s'.format(signalValue, _('dBm'));
title = '%s: %d %s'.format(_('Signal'), signalValue, _('dBm'));
}
else if (signalPercent > -1) {
value = '\xa0---\xa0';
title = _('No signal');
}
else {
value = E('em', {}, E('small', {}, [ _('disabled') ]));
title = _('Interface is disabled');
}
return E('div', {
'class': wrap ? 'center' : 'ifacebadge',
'title': title,
'data-signal': signalValue,
'data-noise': noiseValue
}, [
E('img', { 'src': icon }),
E('span', {}, [
wrap ? E('br') : ' ',
value
])
]);
}
function render_network_badge(radioNet) {
return render_signal_badge(radioNet.isUp() ? radioNet.getSignalPercent() : -1, radioNet.getSignal(), radioNet.getNoise());
}
function render_radio_status(radioDev, wifiNets) {
var name = radioDev.getI18n().replace(/ Wireless Controller .+$/, ''),
node = E('div', [ E('big', {}, E('strong', {}, name)), E('div') ]),
channel, frequency, bitrate;
for (var i = 0; i < wifiNets.length; i++) {
channel = channel || wifiNets[i].getChannel();
frequency = frequency || wifiNets[i].getFrequency();
bitrate = bitrate || wifiNets[i].getBitRate();
}
if (radioDev.isUp())
L.itemlist(node.lastElementChild, [
_('Channel'), '%s (%s %s)'.format(channel || '?', frequency || '?', _('GHz')),
_('Bitrate'), '%s %s'.format(bitrate || '?', _('Mbit/s'))
], ' | ');
else
node.lastElementChild.appendChild(E('em', _('Device is not active')));
return node;
}
function render_network_status(radioNet) {
var mode = radioNet.getActiveMode(),
bssid = radioNet.getActiveBSSID(),
channel = radioNet.getChannel(),
disabled = (radioNet.get('disabled') == '1' || uci.get('wireless', radioNet.getWifiDeviceName(), 'disabled') == '1'),
is_assoc = (bssid && bssid != '00:00:00:00:00:00' && channel && mode != 'Unknown' && !disabled),
is_mesh = (radioNet.getMode() == 'mesh'),
changecount = count_changes(radioNet.getName()),
status_text = null;
if (changecount)
status_text = E('a', {
href: '#',
click: L.bind(ui.changes.displayChanges, ui.changes)
}, _('Interface has %d pending changes').format(changecount));
else if (!is_assoc)
status_text = E('em', disabled ? _('Wireless is disabled') : _('Wireless is not associated'));
return L.itemlist(E('div'), [
is_mesh ? _('Mesh ID') : _('SSID'), (is_mesh ? radioNet.getMeshID() : radioNet.getSSID()) || '?',
_('Mode'), mode,
_('BSSID'), (!changecount && is_assoc) ? bssid : null,
_('Encryption'), (!changecount && is_assoc) ? radioNet.getActiveEncryption() || _('None') : null,
null, status_text
], [ ' | ', E('br') ]);
}
function render_modal_status(node, radioNet) {
var mode = radioNet.getActiveMode(),
noise = radioNet.getNoise(),
bssid = radioNet.getActiveBSSID(),
channel = radioNet.getChannel(),
disabled = (radioNet.get('disabled') == '1'),
is_assoc = (bssid && bssid != '00:00:00:00:00:00' && channel && mode != 'Unknown' && !disabled);
if (node == null)
node = E('span', { 'class': 'ifacebadge large', 'data-network': radioNet.getName() }, [ E('small'), E('span') ]);
L.dom.content(node.firstElementChild, render_signal_badge(disabled ? -1 : radioNet.getSignalPercent(), radioNet.getSignal(), noise, true));
L.itemlist(node.lastElementChild, [
_('Mode'), mode,
_('SSID'), radioNet.getSSID() || '?',
_('BSSID'), is_assoc ? bssid : null,
_('Encryption'), is_assoc ? radioNet.getActiveEncryption() || _('None') : null,
_('Channel'), is_assoc ? '%d (%.3f %s)'.format(radioNet.getChannel(), radioNet.getFrequency() || 0, _('GHz')) : null,
_('Tx-Power'), is_assoc ? '%d %s'.format(radioNet.getTXPower(), _('dBm')) : null,
_('Signal'), is_assoc ? '%d %s'.format(radioNet.getSignal(), _('dBm')) : null,
_('Noise'), (is_assoc && noise != null) ? '%d %s'.format(noise, _('dBm')) : null,
_('Bitrate'), is_assoc ? '%.1f %s'.format(radioNet.getBitRate() || 0, _('Mbit/s')) : null,
_('Country'), is_assoc ? radioNet.getCountryCode() : null
], [ ' | ', E('br'), E('br'), E('br'), E('br'), E('br'), ' | ', E('br'), ' | ' ]);
if (!is_assoc)
L.dom.append(node.lastElementChild, E('em', disabled ? _('Wireless is disabled') : _('Wireless is not associated')));
return node;
}
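// Formats a station rate entry (rate, channel width, MCS/NSS, short GI) as a human readable string.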
function format_wifirate(rate) {
var s = '%.1f\xa0%s, %d\xa0%s'.format(rate.rate / 1000, _('Mbit/s'), rate.mhz, _('MHz')),
ht = rate.ht, vht = rate.vht,
mhz = rate.mhz, nss = rate.nss,
mcs = rate.mcs, sgi = rate.short_gi;
if (ht || vht) {
if (vht) s += ', VHT-MCS\xa0%d'.format(mcs);
if (nss) s += ', VHT-NSS\xa0%d'.format(nss);
if (ht) s += ', MCS\xa0%s'.format(mcs);
if (sgi) s += ', ' + _('Short GI').replace(/ /g, '\xa0');
}
return s;
}
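// Visually marks the radio's overview row as restarting: disables its action buttons and shows a restart hint.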
function radio_restart(id, ev) {
var row = document.querySelector('.cbi-section-table-row[data-sid="%s"]'.format(id)),
dsc = row.querySelector('[data-name="_stat"] > div'),
btn = row.querySelector('.cbi-section-actions button');
btn.blur();
btn.classList.add('spinning');
btn.disabled = true;
dsc.setAttribute('restart', '');
L.dom.content(dsc, E('em', _('Device is restarting…')));
}
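// Toggles the 'disabled' state of a wireless network (and of its radio once all of the radio's networks are disabled), then saves and applies the configuration.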
function network_updown(id, map, ev) {
var radio = uci.get('wireless', id, 'device'),
disabled = (uci.get('wireless', id, 'disabled') == '1') ||
(uci.get('wireless', radio, 'disabled') == '1');
if (disabled) {
uci.unset('wireless', id, 'disabled');
uci.unset('wireless', radio, 'disabled');
}
else {
uci.set('wireless', id, 'disabled', '1');
var all_networks_disabled = true,
wifi_ifaces = uci.sections('wireless', 'wifi-iface');
for (var i = 0; i < wifi_ifaces.length; i++) {
if (wifi_ifaces[i].device == radio && wifi_ifaces[i].disabled != '1') {
all_networks_disabled = false;
break;
}
}
if (all_networks_disabled)
uci.set('wireless', radio, 'disabled', '1');
}
return map.save().then(function() {
ui.changes.apply()
});
}
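// Returns the next unused 'wifinetN' section id, starting the search at the given offset.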
function next_free_sid(offset) {
var sid = 'wifinet' + offset;
while (uci.get('wireless', sid))
sid = 'wifinet' + (++offset);
return sid;
}
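// Registers a depends() entry on the given option for every combination of the supplied dependency value lists.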
function add_dependency_permutations(o, deps) {
var res = null;
for (var key in deps) {
if (!deps.hasOwnProperty(key) || !Array.isArray(deps[key]))
continue;
var list = deps[key],
tmp = [];
for (var j = 0; j < list.length; j++) {
for (var k = 0; k < (res ? res.length : 1); k++) {
var item = (res ? Object.assign({}, res[k]) : {});
item[key] = list[j];
tmp.push(item);
}
}
res = tmp;
}
for (var i = 0; i < (res ? res.length : 0); i++)
o.depends(res[i]);
}
var CBIWifiFrequencyValue = form.Value.extend({
callFrequencyList: rpc.declare({
object: 'iwinfo',
method: 'freqlist',
params: [ 'device' ],
expect: { results: [] }
}),
load: function(section_id) {
return Promise.all([
network.getWifiDevice(section_id),
this.callFrequencyList(section_id)
]).then(L.bind(function(data) {
this.channels = {
'11g': L.hasSystemFeature('hostapd', 'acs') ? [ 'auto', 'auto', true ] : [],
'11a': L.hasSystemFeature('hostapd', 'acs') ? [ 'auto', 'auto', true ] : []
};
for (var i = 0; i < data[1].length; i++)
this.channels[(data[1][i].mhz > 2484) ? '11a' : '11g'].push(
data[1][i].channel,
'%d (%d Mhz)'.format(data[1][i].channel, data[1][i].mhz),
!data[1][i].restricted
);
var hwmodelist = L.toArray(data[0] ? data[0].getHWModes() : null)
.reduce(function(o, v) { o[v] = true; return o }, {});
this.modes = [
'', 'Legacy', true,
'n', 'N', hwmodelist.n,
'ac', 'AC', hwmodelist.ac
];
var htmodelist = L.toArray(data[0] ? data[0].getHTModes() : null)
.reduce(function(o, v) { o[v] = true; return o }, {});
this.htmodes = {
'': [ '', '-', true ],
'n': [
'HT20', '20 MHz', htmodelist.HT20,
'HT40', '40 MHz', htmodelist.HT40
],
'ac': [
'VHT20', '20 MHz', htmodelist.VHT20,
'VHT40', '40 MHz', htmodelist.VHT40,
'VHT80', '80 MHz', htmodelist.VHT80,
'VHT160', '160 MHz', htmodelist.VHT160
]
};
this.bands = {
'': [
'11g', '2.4 GHz', this.channels['11g'].length > 3,
'11a', '5 GHz', this.channels['11a'].length > 3
],
'n': [
'11g', '2.4 GHz', this.channels['11g'].length > 3,
'11a', '5 GHz', this.channels['11a'].length > 3
],
'ac': [
'11a', '5 GHz', true
]
};
}, this));
},
setValues: function(sel, vals) {
if (sel.vals)
sel.vals.selected = sel.selectedIndex;
while (sel.options[0])
sel.remove(0);
for (var i = 0; vals && i < vals.length; i += 3)
if (vals[i+2])
sel.add(E('option', { value: vals[i+0] }, [ vals[i+1] ]));
if (vals && !isNaN(vals.selected))
sel.selectedIndex = vals.selected;
sel.parentNode.style.display = (sel.options.length <= 1) ? 'none' : '';
sel.vals = vals;
},
toggleWifiMode: function(elem) {
this.toggleWifiHTMode(elem);
this.toggleWifiBand(elem);
},
toggleWifiHTMode: function(elem) {
var mode = elem.querySelector('.mode');
var bwdt = elem.querySelector('.htmode');
this.setValues(bwdt, this.htmodes[mode.value]);
},
toggleWifiBand: function(elem) {
var mode = elem.querySelector('.mode');
var band = elem.querySelector('.band');
this.setValues(band, this.bands[mode.value]);
this.toggleWifiChannel(elem);
},
toggleWifiChannel: function(elem) {
var band = elem.querySelector('.band');
var chan = elem.querySelector('.channel');
this.setValues(chan, this.channels[band.value]);
},
setInitialValues: function(section_id, elem) {
var mode = elem.querySelector('.mode'),
band = elem.querySelector('.band'),
chan = elem.querySelector('.channel'),
bwdt = elem.querySelector('.htmode'),
htval = uci.get('wireless', section_id, 'htmode'),
hwval = uci.get('wireless', section_id, 'hwmode'),
chval = uci.get('wireless', section_id, 'channel');
this.setValues(mode, this.modes);
if (/VHT20|VHT40|VHT80|VHT160/.test(htval))
mode.value = 'ac';
else if (/HT20|HT40/.test(htval))
mode.value = 'n';
else
mode.value = '';
this.toggleWifiMode(elem);
if (/a/.test(hwval))
band.value = '11a';
else
band.value = '11g';
this.toggleWifiBand(elem);
bwdt.value = htval;
chan.value = chval;
return elem;
},
renderWidget: function(section_id, option_index, cfgvalue) {
var elem = E('div');
L.dom.content(elem, [
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Mode'), E('br'),
E('select', {
'class': 'mode',
'style': 'width:auto',
'change': L.bind(this.toggleWifiMode, this, elem)
})
]),
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Band'), E('br'),
E('select', {
'class': 'band',
'style': 'width:auto',
'change': L.bind(this.toggleWifiBand, this, elem)
})
]),
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Channel'), E('br'),
E('select', {
'class': 'channel',
'style': 'width:auto'
})
]),
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Width'), E('br'),
E('select', {
'class': 'htmode',
'style': 'width:auto'
})
]),
E('br', { 'style': 'clear:left' })
]);
return this.setInitialValues(section_id, elem);
},
cfgvalue: function(section_id) {
return [
uci.get('wireless', section_id, 'htmode'),
uci.get('wireless', section_id, 'hwmode'),
uci.get('wireless', section_id, 'channel')
];
},
formvalue: function(section_id) {
var node = this.map.findElement('data-field', this.cbid(section_id));
return [
node.querySelector('.htmode').value,
node.querySelector('.band').value,
node.querySelector('.channel').value
];
},
write: function(section_id, value) {
uci.set('wireless', section_id, 'htmode', value[0] || null);
uci.set('wireless', section_id, 'hwmode', value[1]);
uci.set('wireless', section_id, 'channel', value[2]);
}
});
var CBIWifiTxPowerValue = form.ListValue.extend({
callTxPowerList: rpc.declare({
object: 'iwinfo',
method: 'txpowerlist',
params: [ 'device' ],
expect: { results: [] }
}),
load: function(section_id) {
return this.callTxPowerList(section_id).then(L.bind(function(pwrlist) {
this.powerval = this.wifiNetwork ? this.wifiNetwork.getTXPower() : null;
this.poweroff = this.wifiNetwork ? this.wifiNetwork.getTXPowerOffset() : null;
this.value('', _('driver default'));
for (var i = 0; i < pwrlist.length; i++)
this.value(pwrlist[i].dbm, '%d dBm (%d mW)'.format(pwrlist[i].dbm, pwrlist[i].mw));
return form.ListValue.prototype.load.apply(this, [section_id]);
}, this));
},
renderWidget: function(section_id, option_index, cfgvalue) {
var widget = form.ListValue.prototype.renderWidget.apply(this, [section_id, option_index, cfgvalue]);
widget.firstElementChild.style.width = 'auto';
L.dom.append(widget, E('span', [
' - ', _('Current power'), ': ',
E('span', [ this.powerval != null ? '%d dBm'.format(this.powerval) : E('em', _('unknown')) ]),
this.poweroff ? ' + %d dB offset = %s dBm'.format(this.poweroff, this.powerval != null ? this.powerval + this.poweroff : '?') : ''
]));
return widget;
}
});
var CBIWifiCountryValue = form.Value.extend({
callCountryList: rpc.declare({
object: 'iwinfo',
method: 'countrylist',
params: [ 'device' ],
expect: { results: [] }
}),
load: function(section_id) {
return this.callCountryList(section_id).then(L.bind(function(countrylist) {
if (Array.isArray(countrylist) && countrylist.length > 0) {
this.value('', _('driver default'));
for (var i = 0; i < countrylist.length; i++)
this.value(countrylist[i].iso3166, '%s - %s'.format(countrylist[i].iso3166, countrylist[i].country));
}
return form.Value.prototype.load.apply(this, [section_id]);
}, this));
},
validate: function(section_id, formvalue) {
if (formvalue != null && formvalue != '' && !/^[A-Z0-9][A-Z0-9]$/.test(formvalue))
return _('Use ISO/IEC 3166 alpha2 country codes.');
return true;
},
renderWidget: function(section_id, option_index, cfgvalue) {
var typeClass = (this.keylist && this.keylist.length) ? form.ListValue : form.Value;
return typeClass.prototype.renderWidget.apply(this, [section_id, option_index, cfgvalue]);
}
});
return L.view.extend({
poll_status: function(map, data) {
var rows = map.querySelectorAll('.cbi-section-table-row[data-sid]');
for (var i = 0; i < rows.length; i++) {
var section_id = rows[i].getAttribute('data-sid'),
radioDev = data[1].filter(function(d) { return d.getName() == section_id })[0],
radioNet = data[2].filter(function(n) { return n.getName() == section_id })[0],
badge = rows[i].querySelector('[data-name="_badge"] > div'),
stat = rows[i].querySelector('[data-name="_stat"]'),
btns = rows[i].querySelectorAll('.cbi-section-actions button'),
busy = btns[0].classList.contains('spinning') || btns[1].classList.contains('spinning') || btns[2].classList.contains('spinning');
if (radioDev) {
L.dom.content(badge, render_radio_badge(radioDev));
L.dom.content(stat, render_radio_status(radioDev, data[2].filter(function(n) { return n.getWifiDeviceName() == radioDev.getName() })));
}
else {
L.dom.content(badge, render_network_badge(radioNet));
L.dom.content(stat, render_network_status(radioNet));
}
if (stat.hasAttribute('restart'))
L.dom.content(stat, E('em', _('Device is restarting…')));
btns[0].disabled = busy;
btns[1].disabled = busy;
btns[2].disabled = busy;
}
var table = document.querySelector('#wifi_assoclist_table'),
hosts = data[0],
trows = [];
for (var i = 0; i < data[3].length; i++) {
var bss = data[3][i],
name = hosts.getHostnameByMACAddr(bss.mac),
ipv4 = hosts.getIPAddrByMACAddr(bss.mac),
ipv6 = hosts.getIP6AddrByMACAddr(bss.mac);
var hint;
if (name && ipv4 && ipv6)
hint = '%s <span class="hide-xs">(%s, %s)</span>'.format(name, ipv4, ipv6);
else if (name && (ipv4 || ipv6))
hint = '%s <span class="hide-xs">(%s)</span>'.format(name, ipv4 || ipv6);
else
hint = name || ipv4 || ipv6 || '?';
var row = [
E('span', {
'class': 'ifacebadge',
'data-ifname': bss.network.getIfname(),
'data-ssid': bss.network.getSSID()
}, [
E('img', {
'src': L.resource('icons/wifi%s.png').format(bss.network.isUp() ? '' : '_disabled'),
'title': bss.radio.getI18n()
}),
E('span', [
' %s '.format(bss.network.getShortName()),
E('small', '(%s)'.format(bss.network.getIfname()))
])
]),
bss.mac,
hint,
render_signal_badge(Math.min((bss.signal + 110) / 70 * 100, 100), bss.signal, bss.noise),
E('span', {}, [
E('span', format_wifirate(bss.rx)),
E('br'),
E('span', format_wifirate(bss.tx))
])
];
if (bss.network.isClientDisconnectSupported()) {
if (table.firstElementChild.childNodes.length < 6)
table.firstElementChild.appendChild(E('div', { 'class': 'th cbi-section-actions'}));
row.push(E('button', {
'class': 'cbi-button cbi-button-remove',
'click': L.bind(function(net, mac, ev) {
L.dom.parent(ev.currentTarget, '.tr').style.opacity = 0.5;
ev.currentTarget.classList.add('spinning');
ev.currentTarget.disabled = true;
ev.currentTarget.blur();
net.disconnectClient(mac, true, 5, 60000);
}, this, bss.network, bss.mac)
}, [ _('Disconnect') ]));
}
else {
row.push('-');
}
trows.push(row);
}
cbi_update_table(table, trows, E('em', _('No information available')));
var stat = document.querySelector('.cbi-modal [data-name="_wifistat_modal"] .ifacebadge.large');
if (stat)
render_modal_status(stat, data[2].filter(function(n) { return n.getName() == stat.getAttribute('data-network') })[0]);
return network.flushCache();
},
load: function() {
return Promise.all([
uci.changes(),
uci.load('wireless')
]);
},
checkAnonymousSections: function() {
var wifiIfaces = uci.sections('wireless', 'wifi-iface');
for (var i = 0; i < wifiIfaces.length; i++)
if (wifiIfaces[i]['.anonymous'])
return true;
return false;
},
callUciRename: rpc.declare({
object: 'uci',
method: 'rename',
params: [ 'config', 'section', 'name' ]
}),
render: function() {
if (this.checkAnonymousSections())
return this.renderMigration();
else
return this.renderOverview();
},
handleMigration: function(ev) {
var wifiIfaces = uci.sections('wireless', 'wifi-iface'),
id_offset = 0,
tasks = [];
for (var i = 0; i < wifiIfaces.length; i++) {
if (!wifiIfaces[i]['.anonymous'])
continue;
var new_name = next_free_sid(id_offset);
tasks.push(this.callUciRename('wireless', wifiIfaces[i]['.name'], new_name));
id_offset = +new_name.substring(7) + 1;
}
return Promise.all(tasks)
.then(L.bind(ui.changes.init, ui.changes))
.then(L.bind(ui.changes.apply, ui.changes));
},
renderMigration: function() {
ui.showModal(_('Wireless configuration migration'), [
E('p', _('The existing wireless configuration needs to be changed for LuCI to function properly.')),
E('p', _('Upon pressing "Continue", anonymous "wifi-iface" sections will be assigned with a name in the form <em>wifinet#</em> and the network will be restarted to apply the updated configuration.')),
E('div', { 'class': 'right' },
E('button', {
'class': 'btn cbi-button-action important',
'click': ui.createHandlerFn(this, 'handleMigration')
}, _('Continue')))
]);
},
renderOverview: function() {
var m, s, o;
m = new form.Map('wireless');
m.chain('network');
m.chain('firewall');
s = m.section(form.GridSection, 'wifi-device', _('Wireless Overview'));
s.anonymous = true;
s.addremove = false;
s.load = function() {
return network.getWifiDevices().then(L.bind(function(radios) {
this.radios = radios.sort(function(a, b) {
return a.getName() > b.getName();
});
var tasks = [];
for (var i = 0; i < radios.length; i++)
tasks.push(radios[i].getWifiNetworks());
return Promise.all(tasks);
}, this)).then(L.bind(function(data) {
this.wifis = [];
for (var i = 0; i < data.length; i++)
this.wifis.push.apply(this.wifis, data[i]);
}, this));
};
s.cfgsections = function() {
var rv = [];
for (var i = 0; i < this.radios.length; i++) {
rv.push(this.radios[i].getName());
for (var j = 0; j < this.wifis.length; j++)
if (this.wifis[j].getWifiDeviceName() == this.radios[i].getName())
rv.push(this.wifis[j].getName());
}
return rv;
};
s.modaltitle = function(section_id) {
var radioNet = this.wifis.filter(function(w) { return w.getName() == section_id})[0];
return radioNet ? radioNet.getI18n() : _('Edit wireless network');
};
s.lookupRadioOrNetwork = function(section_id) {
var radioDev = this.radios.filter(function(r) { return r.getName() == section_id })[0];
if (radioDev)
return radioDev;
var radioNet = this.wifis.filter(function(w) { return w.getName() == section_id })[0];
if (radioNet)
return radioNet;
return null;
};
s.renderRowActions = function(section_id) {
var inst = this.lookupRadioOrNetwork(section_id), btns;
if (inst.getWifiNetworks) {
btns = [
E('button', {
'class': 'cbi-button cbi-button-neutral',
'title': _('Restart radio interface'),
'click': ui.createHandlerFn(this, radio_restart, section_id)
}, _('Restart')),
E('button', {
'class': 'cbi-button cbi-button-action important',
'title': _('Find and join network'),
'click': ui.createHandlerFn(this, 'handleScan', inst)
}, _('Scan')),
E('button', {
'class': 'cbi-button cbi-button-add',
'title': _('Provide new network'),
'click': ui.createHandlerFn(this, 'handleAdd', inst)
}, _('Add'))
];
}
else {
var isDisabled = (inst.get('disabled') == '1' ||
uci.get('wireless', inst.getWifiDeviceName(), 'disabled') == '1');
btns = [
E('button', {
'class': 'cbi-button cbi-button-neutral enable-disable',
'title': isDisabled ? _('Enable this network') : _('Disable this network'),
'click': ui.createHandlerFn(this, network_updown, section_id, this.map)
}, isDisabled ? _('Enable') : _('Disable')),
E('button', {
'class': 'cbi-button cbi-button-action important',
'title': _('Edit this network'),
'click': ui.createHandlerFn(this, 'renderMoreOptionsModal', section_id)
}, _('Edit')),
E('button', {
'class': 'cbi-button cbi-button-negative remove',
'title': _('Delete this network'),
'click': ui.createHandlerFn(this, 'handleRemove', section_id)
}, _('Remove'))
];
}
return E('div', { 'class': 'td middle cbi-section-actions' }, E('div', btns));
};
s.addModalOptions = function(s) {
return network.getWifiNetwork(s.section).then(function(radioNet) {
var hwtype = uci.get('wireless', radioNet.getWifiDeviceName(), 'type');
var o, ss;
o = s.option(form.SectionValue, '_device', form.NamedSection, radioNet.getWifiDeviceName(), 'wifi-device', _('Device Configuration'));
o.modalonly = true;
ss = o.subsection;
ss.tab('general', _('General Setup'));
ss.tab('advanced', _('Advanced Settings'));
var isDisabled = (radioNet.get('disabled') == '1' ||
uci.get('wireless', radioNet.getWifiDeviceName(), 'disabled') == 1);
o = ss.taboption('general', form.DummyValue, '_wifistat_modal', _('Status'));
o.cfgvalue = L.bind(function(radioNet) {
return render_modal_status(null, radioNet);
}, this, radioNet);
o.write = function() {};
o = ss.taboption('general', form.Button, '_toggle', isDisabled ? _('Wireless network is disabled') : _('Wireless network is enabled'));
o.inputstyle = isDisabled ? 'apply' : 'reset';
o.inputtitle = isDisabled ? _('Enable') : _('Disable');
o.onclick = ui.createHandlerFn(s, network_updown, s.section, s.map);
o = ss.taboption('general', CBIWifiFrequencyValue, '_freq', '<br />' + _('Operating frequency'));
o.ucisection = s.section;
if (hwtype == 'mac80211') {
o = ss.taboption('general', CBIWifiTxPowerValue, 'txpower', _('Maximum transmit power'), _('Specifies the maximum transmit power the wireless radio may use. Depending on regulatory requirements and wireless usage, the actual transmit power may be reduced by the driver.'));
o.wifiNetwork = radioNet;
o = ss.taboption('advanced', CBIWifiCountryValue, 'country', _('Country Code'));
o.wifiNetwork = radioNet;
o = ss.taboption('advanced', form.Flag, 'legacy_rates', _('Allow legacy 802.11b rates'));
o.default = o.enabled;
o = ss.taboption('advanced', form.Value, 'distance', _('Distance Optimization'), _('Distance to farthest network member in meters.'));
o.datatype = 'range(0,114750)';
o.placeholder = 'auto';
o = ss.taboption('advanced', form.Value, 'frag', _('Fragmentation Threshold'));
o.datatype = 'min(256)';
o.placeholder = _('off');
o = ss.taboption('advanced', form.Value, 'rts', _('RTS/CTS Threshold'));
o.datatype = 'uinteger';
o.placeholder = _('off');
o = ss.taboption('advanced', form.Flag, 'noscan', _('Force 40MHz mode'), _('Always use 40MHz channels even if the secondary channel overlaps. Using this option does not comply with IEEE 802.11n-2009!'));
o.rmempty = true;
o = ss.taboption('advanced', form.Value, 'beacon_int', _('Beacon Interval'));
o.datatype = 'range(15,65535)';
o.placeholder = 100;
o.rmempty = true;
}
o = s.option(form.SectionValue, '_device', form.NamedSection, radioNet.getName(), 'wifi-iface', _('Interface Configuration'));
o.modalonly = true;
ss = o.subsection;
ss.tab('general', _('General Setup'));
ss.tab('encryption', _('Wireless Security'));
ss.tab('macfilter', _('MAC-Filter'));
ss.tab('advanced', _('Advanced Settings'));
o = ss.taboption('general', form.ListValue, 'mode', _('Mode'));
o.value('ap', _('Access Point'));
o.value('sta', _('Client'));
o.value('adhoc', _('Ad-Hoc'));
o = ss.taboption('general', form.Value, 'mesh_id', _('Mesh Id'));
o.depends('mode', 'mesh');
o = ss.taboption('advanced', form.Flag, 'mesh_fwding', _('Forward mesh peer traffic'));
o.rmempty = false;
o.default = '1';
o.depends('mode', 'mesh');
o = ss.taboption('advanced', form.Value, 'mesh_rssi_threshold', _('RSSI threshold for joining'), _('0 = not using RSSI threshold, 1 = do not change driver default'));
o.rmempty = false;
o.default = '0';
o.datatype = 'range(-255,1)';
o.depends('mode', 'mesh');
o = ss.taboption('general', form.Value, 'ssid', _('<abbr title="Extended Service Set Identifier">ESSID</abbr>'));
o.datatype = 'maxlength(32)';
o.depends('mode', 'ap');
o.depends('mode', 'sta');
o.depends('mode', 'adhoc');
o.depends('mode', 'ahdemo');
o.depends('mode', 'monitor');
o.depends('mode', 'ap-wds');
o.depends('mode', 'sta-wds');
o.depends('mode', 'wds');
o = ss.taboption('general', form.Value, 'bssid', _('<abbr title="Basic Service Set Identifier">BSSID</abbr>'));
o.datatype = 'macaddr';
o = ss.taboption('general', widgets.NetworkSelect, 'network', _('Network'), _('Choose the network(s) you want to attach to this wireless interface or fill out the <em>create</em> field to define a new network.'));
o.rmempty = true;
o.multiple = true;
o.novirtual = true;
o.write = function(section_id, value) {
return network.getDevice(section_id).then(L.bind(function(dev) {
var old_networks = dev.getNetworks().reduce(function(o, v) { o[v.getName()] = v; return o }, {}),
new_networks = {},
values = L.toArray(value),
tasks = [];
for (var i = 0; i < values.length; i++) {
new_networks[values[i]] = true;
if (old_networks[values[i]])
continue;
tasks.push(network.getNetwork(values[i]).then(L.bind(function(name, net) {
return net || network.addNetwork(name, { proto: 'none' });
}, this, values[i])).then(L.bind(function(dev, net) {
if (net) {
if (!net.isEmpty())
net.set('type', 'bridge');
net.addDevice(dev);
}
}, this, dev)));
}
for (var name in old_networks)
if (!new_networks[name])
tasks.push(network.getNetwork(name).then(L.bind(function(dev, net) {
if (net)
net.deleteDevice(dev);
}, this, dev)));
return Promise.all(tasks);
}, this));
};
if (hwtype == 'mac80211') {
var mode = ss.children[0],
bssid = ss.children[5],
encr;
mode.value('mesh', '802.11s');
mode.value('ahdemo', _('Pseudo Ad-Hoc (ahdemo)'));
mode.value('monitor', _('Monitor'));
bssid.depends('mode', 'adhoc');
bssid.depends('mode', 'sta');
bssid.depends('mode', 'sta-wds');
o = ss.taboption('macfilter', form.ListValue, 'macfilter', _('MAC-Address Filter'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o.value('', _('disable'));
o.value('allow', _('Allow listed only'));
o.value('deny', _('Allow all except listed'));
o = ss.taboption('macfilter', form.DynamicList, 'maclist', _('MAC-List'));
o.datatype = 'macaddr';
o.depends('macfilter', 'allow');
o.depends('macfilter', 'deny');
o.load = function(section_id) {
return network.getHostHints().then(L.bind(function(hints) {
hints.getMACHints().map(L.bind(function(hint) {
this.value(hint[0], hint[1] ? '%s (%s)'.format(hint[0], hint[1]) : hint[0]);
}, this));
return form.DynamicList.prototype.load.apply(this, [section_id]);
}, this));
};
mode.value('ap-wds', '%s (%s)'.format(_('Access Point'), _('WDS')));
mode.value('sta-wds', '%s (%s)'.format(_('Client'), _('WDS')));
mode.write = function(section_id, value) {
switch (value) {
case 'ap-wds':
uci.set('wireless', section_id, 'mode', 'ap');
uci.set('wireless', section_id, 'wds', '1');
break;
case 'sta-wds':
uci.set('wireless', section_id, 'mode', 'sta');
uci.set('wireless', section_id, 'wds', '1');
break;
default:
uci.set('wireless', section_id, 'mode', value);
uci.unset('wireless', section_id, 'wds');
break;
}
};
mode.cfgvalue = function(section_id) {
var mode = uci.get('wireless', section_id, 'mode'),
wds = uci.get('wireless', section_id, 'wds');
if (mode == 'ap' && wds)
return 'ap-wds';
else if (mode == 'sta' && wds)
return 'sta-wds';
return mode;
};
o = ss.taboption('general', form.Flag, 'hidden', _('Hide <abbr title="Extended Service Set Identifier">ESSID</abbr>'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o = ss.taboption('general', form.Flag, 'wmm', _('WMM Mode'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o.default = o.enabled;
o = ss.taboption('advanced', form.Flag, 'isolate', _('Isolate Clients'), _('Prevents client-to-client communication'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o = ss.taboption('advanced', form.Value, 'ifname', _('Interface name'), _('Override default interface name'));
o.optional = true;
o.placeholder = radioNet.getIfname();
if (/^radio\d+\.network/.test(o.placeholder))
o.placeholder = '';
o = ss.taboption('advanced', form.Flag, 'short_preamble', _('Short Preamble'));
o.default = o.enabled;
o = ss.taboption('advanced', form.Value, 'dtim_period', _('DTIM Interval'), _('Delivery Traffic Indication Message Interval'));
o.optional = true;
o.placeholder = 2;
o.datatype = 'range(1,255)';
o = ss.taboption('advanced', form.Value, 'wpa_group_rekey', _('Time interval for rekeying GTK'), _('sec'));
o.optional = true;
o.placeholder = 600;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Flag , 'skip_inactivity_poll', _('Disable Inactivity Polling'));
o.optional = true;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Value, 'max_inactivity', _('Station inactivity limit'), _('sec'));
o.optional = true;
o.placeholder = 300;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Value, 'max_listen_interval', _('Maximum allowed Listen Interval'));
o.optional = true;
o.placeholder = 65535;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Flag, 'disassoc_low_ack', _('Disassociate On Low Acknowledgement'), _('Allow AP mode to disconnect STAs based on low ACK condition'));
o.default = o.enabled;
}
encr = o = ss.taboption('encryption', form.ListValue, 'encryption', _('Encryption'));
o.depends('mode', 'ap');
o.depends('mode', 'sta');
o.depends('mode', 'adhoc');
o.depends('mode', 'ahdemo');
o.depends('mode', 'ap-wds');
o.depends('mode', 'sta-wds');
o.depends('mode', 'mesh');
o.cfgvalue = function(section_id) {
var v = String(uci.get('wireless', section_id, 'encryption'));
if (v == 'wep')
return 'wep-open';
else if (v.match(/\+/))
return v.replace(/\+.+$/, '');
return v;
};
o.write = function(section_id, value) {
var e = this.section.children.filter(function(o) { return o.option == 'encryption' })[0].formvalue(section_id),
co = this.section.children.filter(function(o) { return o.option == 'cipher' })[0], c = co.formvalue(section_id);
if (value == 'wpa' || value == 'wpa2' || value == 'wpa3' || value == 'wpa3-mixed')
uci.unset('wireless', section_id, 'key');
if (co.isActive(section_id) && e && (c == 'tkip' || c == 'ccmp' || c == 'tkip+ccmp'))
e += '+' + c;
uci.set('wireless', section_id, 'encryption', e);
};
o = ss.taboption('encryption', form.ListValue, 'cipher', _('Cipher'));
o.depends('encryption', 'wpa');
o.depends('encryption', 'wpa2');
o.depends('encryption', 'wpa3');
o.depends('encryption', 'wpa3-mixed');
o.depends('encryption', 'psk');
o.depends('encryption', 'psk2');
o.depends('encryption', 'wpa-mixed');
o.depends('encryption', 'psk-mixed');
o.value('auto', _('auto'));
o.value('ccmp', _('Force CCMP (AES)'));
o.value('tkip', _('Force TKIP'));
o.value('tkip+ccmp', _('Force TKIP and CCMP (AES)'));
o.write = ss.children.filter(function(o) { return o.option == 'encryption' })[0].write;
o.cfgvalue = function(section_id) {
var v = String(uci.get('wireless', section_id, 'encryption'));
if (v.match(/\+/)) {
v = v.replace(/^[^+]+\+/, '');
if (v == 'aes')
v = 'ccmp';
else if (v == 'tkip+aes' || v == 'aes+tkip' || v == 'ccmp+tkip')
v = 'tkip+ccmp';
}
return v;
};
var crypto_modes = [];
if (hwtype == 'mac80211') {
var has_supplicant = L.hasSystemFeature('wpasupplicant'),
has_hostapd = L.hasSystemFeature('hostapd');
// Probe EAP support
var has_ap_eap = L.hasSystemFeature('hostapd', 'eap'),
has_sta_eap = L.hasSystemFeature('wpasupplicant', 'eap');
// Probe SAE support
var has_ap_sae = L.hasSystemFeature('hostapd', 'sae'),
has_sta_sae = L.hasSystemFeature('wpasupplicant', 'sae');
// Probe OWE support
var has_ap_owe = L.hasSystemFeature('hostapd', 'owe'),
has_sta_owe = L.hasSystemFeature('wpasupplicant', 'owe');
// Probe Suite-B support
var has_ap_eap192 = L.hasSystemFeature('hostapd', 'suiteb192'),
has_sta_eap192 = L.hasSystemFeature('wpasupplicant', 'suiteb192');
if (has_hostapd || has_supplicant) {
crypto_modes.push(['psk2', 'WPA2-PSK', 35]);
crypto_modes.push(['psk-mixed', 'WPA-PSK/WPA2-PSK Mixed Mode', 22]);
crypto_modes.push(['psk', 'WPA-PSK', 21]);
}
else {
encr.description = _('WPA-Encryption requires wpa_supplicant (for client mode) or hostapd (for AP and ad-hoc mode) to be installed.');
}
if (has_ap_sae || has_sta_sae) {
crypto_modes.push(['sae', 'WPA3-SAE', 31]);
crypto_modes.push(['sae-mixed', 'WPA2-PSK/WPA3-SAE Mixed Mode', 30]);
}
if (has_ap_eap || has_sta_eap) {
if (has_ap_eap192 || has_sta_eap192) {
crypto_modes.push(['wpa3', 'WPA3-EAP', 33]);
crypto_modes.push(['wpa3-mixed', 'WPA2-EAP/WPA3-EAP Mixed Mode', 32]);
}
crypto_modes.push(['wpa2', 'WPA2-EAP', 34]);
crypto_modes.push(['wpa', 'WPA-EAP', 20]);
}
if (has_ap_owe || has_sta_owe) {
crypto_modes.push(['owe', 'OWE', 1]);
}
encr.crypto_support = {
'ap': {
'wep-open': true,
'wep-shared': true,
'psk': has_hostapd || _('Requires hostapd'),
'psk2': has_hostapd || _('Requires hostapd'),
'psk-mixed': has_hostapd || _('Requires hostapd'),
'sae': has_ap_sae || _('Requires hostapd with SAE support'),
'sae-mixed': has_ap_sae || _('Requires hostapd with SAE support'),
'wpa': has_ap_eap || _('Requires hostapd with EAP support'),
'wpa2': has_ap_eap || _('Requires hostapd with EAP support'),
'wpa3': has_ap_eap192 || _('Requires hostapd with EAP Suite-B support'),
'wpa3-mixed': has_ap_eap192 || _('Requires hostapd with EAP Suite-B support'),
'owe': has_ap_owe || _('Requires hostapd with OWE support')
},
'sta': {
'wep-open': true,
'wep-shared': true,
'psk': has_supplicant || _('Requires wpa-supplicant'),
'psk2': has_supplicant || _('Requires wpa-supplicant'),
'psk-mixed': has_supplicant || _('Requires wpa-supplicant'),
'sae': has_sta_sae || _('Requires wpa-supplicant with SAE support'),
'sae-mixed': has_sta_sae || _('Requires wpa-supplicant with SAE support'),
'wpa': has_sta_eap || _('Requires wpa-supplicant with EAP support'),
'wpa2': has_sta_eap || _('Requires wpa-supplicant with EAP support'),
'wpa3': has_sta_eap192 || _('Requires wpa-supplicant with EAP Suite-B support'),
'wpa3-mixed': has_sta_eap192 || _('Requires wpa-supplicant with EAP Suite-B support'),
'owe': has_sta_owe || _('Requires wpa-supplicant with OWE support')
},
'adhoc': {
'wep-open': true,
'wep-shared': true,
'psk': has_supplicant || _('Requires wpa-supplicant'),
'psk2': has_supplicant || _('Requires wpa-supplicant'),
'psk-mixed': has_supplicant || _('Requires wpa-supplicant'),
},
'mesh': {
'sae': has_sta_sae || _('Requires wpa-supplicant with SAE support')
},
'ahdemo': {
'wep-open': true,
'wep-shared': true
},
'wds': {
'wep-open': true,
'wep-shared': true
}
};
encr.crypto_support['ap-wds'] = encr.crypto_support['ap'];
encr.crypto_support['sta-wds'] = encr.crypto_support['sta'];
encr.validate = function(section_id, value) {
var modeopt = this.section.children.filter(function(o) { return o.option == 'mode' })[0],
modeval = modeopt.formvalue(section_id),
modetitle = modeopt.vallist[modeopt.keylist.indexOf(modeval)],
enctitle = this.vallist[this.keylist.indexOf(value)];
if (value == 'none')
return true;
if (!L.isObject(this.crypto_support[modeval]) || !this.crypto_support[modeval].hasOwnProperty(value))
return _('The selected %s mode is incompatible with %s encryption').format(modetitle, enctitle);
return this.crypto_support[modeval][value];
};
}
else if (hwtype == 'broadcom') {
crypto_modes.push(['psk2', 'WPA2-PSK', 33]);
crypto_modes.push(['psk+psk2', 'WPA-PSK/WPA2-PSK Mixed Mode', 22]);
crypto_modes.push(['psk', 'WPA-PSK', 21]);
}
crypto_modes.push(['wep-open', _('WEP Open System'), 11]);
crypto_modes.push(['wep-shared', _('WEP Shared Key'), 10]);
crypto_modes.push(['none', _('No Encryption'), 0]);
crypto_modes.sort(function(a, b) { return b[2] - a[2] });
for (var i = 0; i < crypto_modes.length; i++) {
var security_level = (crypto_modes[i][2] >= 30) ? _('strong security')
: (crypto_modes[i][2] >= 20) ? _('medium security')
: (crypto_modes[i][2] >= 10) ? _('weak security') : _('open network');
encr.value(crypto_modes[i][0], '%s (%s)'.format(crypto_modes[i][1], security_level));
}
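// RADIUS authentication/accounting and DAE (Dynamic Authorization) settings,
// only shown for AP modes using the WPA-EAP encryption variants.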
o = ss.taboption('encryption', form.Value, 'auth_server', _('Radius-Authentication-Server'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.datatype = 'host(0)';
o = ss.taboption('encryption', form.Value, 'auth_port', _('Radius-Authentication-Port'), _('Default %d').format(1812));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.datatype = 'port';
o = ss.taboption('encryption', form.Value, 'auth_secret', _('Radius-Authentication-Secret'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.password = true;
o = ss.taboption('encryption', form.Value, 'acct_server', _('Radius-Accounting-Server'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.datatype = 'host(0)';
o = ss.taboption('encryption', form.Value, 'acct_port', _('Radius-Accounting-Port'), _('Default %d').format(1813));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.datatype = 'port';
o = ss.taboption('encryption', form.Value, 'acct_secret', _('Radius-Accounting-Secret'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.password = true;
o = ss.taboption('encryption', form.Value, 'dae_client', _('DAE-Client'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.datatype = 'host(0)';
o = ss.taboption('encryption', form.Value, 'dae_port', _('DAE-Port'), _('Default %d').format(3799));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.datatype = 'port';
o = ss.taboption('encryption', form.Value, 'dae_secret', _('DAE-Secret'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.rmempty = true;
o.password = true;
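// Pre-shared key handling: the virtual '_wpa_key' option maps to the UCI 'key'
// option and clears any leftover WEP key slots on write.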
o = ss.taboption('encryption', form.Value, '_wpa_key', _('Key'));
o.depends('encryption', 'psk');
o.depends('encryption', 'psk2');
o.depends('encryption', 'psk+psk2');
o.depends('encryption', 'psk-mixed');
o.depends('encryption', 'sae');
o.depends('encryption', 'sae-mixed');
o.datatype = 'wpakey';
o.rmempty = true;
o.password = true;
o.cfgvalue = function(section_id) {
var key = uci.get('wireless', section_id, 'key');
return /^[1234]$/.test(key) ? null : key;
};
o.write = function(section_id, value) {
uci.set('wireless', section_id, 'key', value);
uci.unset('wireless', section_id, 'key1');
uci.unset('wireless', section_id, 'key2');
uci.unset('wireless', section_id, 'key3');
uci.unset('wireless', section_id, 'key4');
};
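// WEP key slot selection: UCI stores the active slot number (1-4) in 'key'
// while the actual key material lives in 'key1'..'key4'.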
o = ss.taboption('encryption', form.ListValue, '_wep_key', _('Used Key Slot'));
o.depends('encryption', 'wep-open');
o.depends('encryption', 'wep-shared');
o.value('1', _('Key #%d').format(1));
o.value('2', _('Key #%d').format(2));
o.value('3', _('Key #%d').format(3));
o.value('4', _('Key #%d').format(4));
o.cfgvalue = function(section_id) {
var slot = +uci.get('wireless', section_id, 'key');
return (slot >= 1 && slot <= 4) ? String(slot) : '';
};
o.write = function(section_id, value) {
uci.set('wireless', section_id, 'key', value);
};
for (var slot = 1; slot <= 4; slot++) {
o = ss.taboption('encryption', form.Value, 'key%d'.format(slot), _('Key #%d').format(slot));
o.depends('encryption', 'wep-open');
o.depends('encryption', 'wep-shared');
o.datatype = 'wepkey';
o.rmempty = true;
o.password = true;
o.write = function(section_id, value) {
if (value != null && (value.length == 5 || value.length == 13))
value = 's:%s'.format(value);
uci.set('wireless', section_id, this.option, value);
};
}
if (hwtype == 'mac80211') {
// Probe 802.11r support (and EAP support as a proxy for Openwrt)
var has_80211r = L.hasSystemFeature('hostapd', '11r') || L.hasSystemFeature('hostapd', 'eap');
o = ss.taboption('encryption', form.Flag, 'ieee80211r', _('802.11r Fast Transition'), _('Enables fast roaming among access points that belong to the same Mobility Domain'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
if (has_80211r)
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['psk', 'psk2', 'psk-mixed', 'sae', 'sae-mixed'] });
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'nasid', _('NAS ID'), _('Used for two different purposes: RADIUS NAS ID and 802.11r R0KH-ID. Not needed with normal WPA(2)-PSK.'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.depends({ ieee80211r: '1' });
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'mobility_domain', _('Mobility Domain'), _('4-character hexadecimal ID'));
o.depends({ ieee80211r: '1' });
o.placeholder = '4f57';
o.datatype = 'and(hexstring,length(4))';
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'reassociation_deadline', _('Reassociation Deadline'), _('time units (TUs / 1.024 ms) [1000-65535]'));
o.depends({ ieee80211r: '1' });
o.placeholder = '1000';
o.datatype = 'range(1000,65535)';
o.rmempty = true;
o = ss.taboption('encryption', form.ListValue, 'ft_over_ds', _('FT protocol'));
o.depends({ ieee80211r: '1' });
o.value('1', _('FT over DS'));
o.value('0', _('FT over the Air'));
o.rmempty = true;
o = ss.taboption('encryption', form.Flag, 'ft_psk_generate_local', _('Generate PMK locally'), _('When using a PSK, the PMK can be automatically generated. When enabled, the R0/R1 key options below are not applied. Disable this to use the R0 and R1 key options.'));
o.depends({ ieee80211r: '1' });
o.default = o.enabled;
o.rmempty = false;
o = ss.taboption('encryption', form.Value, 'r0_key_lifetime', _('R0 Key Lifetime'), _('minutes'));
o.depends({ ieee80211r: '1' });
o.placeholder = '10000';
o.datatype = 'uinteger';
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'r1_key_holder', _('R1 Key Holder'), _('6-octet identifier as a hex string - no colons'));
o.depends({ ieee80211r: '1' });
o.placeholder = '00004f577274';
o.datatype = 'and(hexstring,length(12))';
o.rmempty = true;
o = ss.taboption('encryption', form.Flag, 'pmk_r1_push', _('PMK R1 Push'));
o.depends({ ieee80211r: '1' });
o.placeholder = '0';
o.rmempty = true;
o = ss.taboption('encryption', form.DynamicList, 'r0kh', _('External R0 Key Holder List'), _('List of R0KHs in the same Mobility Domain. <br />Format: MAC-address,NAS-Identifier,128-bit key as hex string. <br />This list is used to map R0KH-ID (NAS Identifier) to a destination MAC address when requesting PMK-R1 key from the R0KH that the STA used during the Initial Mobility Domain Association.'));
o.depends({ ieee80211r: '1' });
o.rmempty = true;
o = ss.taboption('encryption', form.DynamicList, 'r1kh', _('External R1 Key Holder List'), _('List of R1KHs in the same Mobility Domain. <br />Format: MAC-address,R1KH-ID as 6 octets with colons,128-bit key as hex string. <br />This list is used to map R1KH-ID to a destination MAC address when sending PMK-R1 key from the R0KH. This is also the list of authorized R1KHs in the MD that can request PMK-R1 keys.'));
o.depends({ ieee80211r: '1' });
o.rmempty = true;
// End of 802.11r options
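// WPA-Enterprise (802.1X) client settings: EAP method, certificate constraints
// and credentials for sta/sta-wds modes.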
o = ss.taboption('encryption', form.ListValue, 'eap_type', _('EAP-Method'));
o.value('tls', 'TLS');
o.value('ttls', 'TTLS');
o.value('peap', 'PEAP');
o.value('fast', 'FAST');
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o = ss.taboption('encryption', form.Flag, 'ca_cert_usesystem', _('Use system certificates'), _("Validate server certificate using built-in system CA bundle,<br />requires the \"ca-bundle\" package"));
o.enabled = '1';
o.disabled = '0';
o.default = o.disabled;
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.validate = function(section_id, value) {
if (value == '1' && !L.hasSystemFeature('cabundle')) {
return _("This option cannot be used because the ca-bundle package is not installed.");
}
return true;
};
o = ss.taboption('encryption', form.FileUpload, 'ca_cert', _('Path to CA-Certificate'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], ca_cert_usesystem: ['0'] });
o = ss.taboption('encryption', form.Value, 'subject_match', _('Certificate constraint (Subject)'), _("Certificate constraint substring - e.g. /CN=wifi.mycompany.com<br />See `logread -f` during handshake for actual values"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o = ss.taboption('encryption', form.DynamicList, 'altsubject_match', _('Certificate constraint (SAN)'), _("Certificate constraint(s) via Subject Alternate Name values<br />(supported attributes: EMAIL, DNS, URI) - e.g. DNS:wifi.mycompany.com"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o = ss.taboption('encryption', form.DynamicList, 'domain_match', _('Certificate constraint (Domain)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (exact match)"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o = ss.taboption('encryption', form.DynamicList, 'domain_suffix_match', _('Certificate constraint (Wildcard)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (suffix match)"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o = ss.taboption('encryption', form.FileUpload, 'client_cert', _('Path to Client-Certificate'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['tls'] });
o = ss.taboption('encryption', form.FileUpload, 'priv_key', _('Path to Private Key'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['tls'] });
o = ss.taboption('encryption', form.Value, 'priv_key_pwd', _('Password of Private Key'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['tls'] });
o.password = true;
o = ss.taboption('encryption', form.ListValue, 'auth', _('Authentication'));
o.value('PAP', 'PAP');
o.value('CHAP', 'CHAP');
o.value('MSCHAP', 'MSCHAP');
o.value('MSCHAPV2', 'MSCHAPv2');
o.value('EAP-GTC', 'EAP-GTC');
o.value('EAP-MD5', 'EAP-MD5');
o.value('EAP-MSCHAPV2', 'EAP-MSCHAPv2');
o.value('EAP-TLS', 'EAP-TLS');
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'ttls'] });
o.validate = function(section_id, value) {
var eo = this.section.children.filter(function(o) { return o.option == 'eap_type' })[0],
ev = eo.formvalue(section_id);
if (ev != 'ttls' && (value == 'PAP' || value == 'CHAP' || value == 'MSCHAP' || value == 'MSCHAPV2'))
return _('This authentication type is not applicable to the selected EAP method.');
return true;
};
o = ss.taboption('encryption', form.Flag, 'ca_cert2_usesystem', _('Use system certificates for inner-tunnel'), _("Validate server certificate using built-in system CA bundle,<br />requires the \"ca-bundle\" package"));
o.enabled = '1';
o.disabled = '0';
o.default = o.disabled;
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o.validate = function(section_id, value) {
if (value == '1' && !L.hasSystemFeature('cabundle')) {
return _("This option cannot be used because the ca-bundle package is not installed.");
}
return true;
};
o = ss.taboption('encryption', form.FileUpload, 'ca_cert2', _('Path to inner CA-Certificate'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'], ca_cert2_usesystem: ['0'] });
o = ss.taboption('encryption', form.Value, 'subject_match2', _('Inner certificate constraint (Subject)'), _("Certificate constraint substring - e.g. /CN=wifi.mycompany.com<br />See `logread -f` during handshake for actual values"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o = ss.taboption('encryption', form.DynamicList, 'altsubject_match2', _('Inner certificate constraint (SAN)'), _("Certificate constraint(s) via Subject Alternate Name values<br />(supported attributes: EMAIL, DNS, URI) - e.g. DNS:wifi.mycompany.com"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o = ss.taboption('encryption', form.DynamicList, 'domain_match2', _('Inner certificate constraint (Domain)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (exact match)"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o = ss.taboption('encryption', form.DynamicList, 'domain_suffix_match2', _('Inner certificate constraint (Wildcard)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (suffix match)"));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o = ss.taboption('encryption', form.FileUpload, 'client_cert2', _('Path to inner Client-Certificate'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o = ss.taboption('encryption', form.FileUpload, 'priv_key2', _('Path to inner Private Key'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o = ss.taboption('encryption', form.Value, 'priv_key2_pwd', _('Password of inner Private Key'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
o.password = true;
o = ss.taboption('encryption', form.Value, 'identity', _('Identity'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'tls', 'ttls'] });
o = ss.taboption('encryption', form.Value, 'anonymous_identity', _('Anonymous Identity'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'tls', 'ttls'] });
o = ss.taboption('encryption', form.Value, 'password', _('Password'));
add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'ttls'] });
o.password = true;
if (hwtype == 'mac80211') {
// ieee802.11w options
if (L.hasSystemFeature('hostapd', '11w')) {
o = ss.taboption('encryption', form.ListValue, 'ieee80211w', _('802.11w Management Frame Protection'), _("Requires the 'full' version of wpad/hostapd and support from the wifi driver <br />(as of Jan 2019: ath9k, ath10k, mwlwifi and mt76)"));
o.value('', _('Disabled'));
o.value('1', _('Optional'));
o.value('2', _('Required'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds', 'sta', 'sta-wds'], encryption: ['owe', 'psk2', 'psk-mixed', 'sae', 'sae-mixed', 'wpa2', 'wpa3', 'wpa3-mixed'] });
o.defaults = {
'2': [{ encryption: 'sae' }, { encryption: 'owe' }, { encryption: 'wpa3' }, { encryption: 'wpa3-mixed' }],
'1': [{ encryption: 'sae-mixed'}],
'': []
};
o = ss.taboption('encryption', form.Value, 'ieee80211w_max_timeout', _('802.11w maximum timeout'), _('802.11w Association SA Query maximum timeout'));
o.depends('ieee80211w', '1');
o.depends('ieee80211w', '2');
o.datatype = 'uinteger';
o.placeholder = '1000';
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'ieee80211w_retry_timeout', _('802.11w retry timeout'), _('802.11w Association SA Query retry timeout'));
o.depends('ieee80211w', '1');
o.depends('ieee80211w', '2');
o.datatype = 'uinteger';
o.placeholder = '201';
o.rmempty = true;
}
o = ss.taboption('encryption', form.Flag, 'wpa_disable_eapol_key_retries', _('Enable key reinstallation (KRACK) countermeasures'), _('Complicates key reinstallation attacks on the client side by disabling retransmission of EAPOL-Key frames that are used to install keys. This workaround might cause interoperability issues and reduced robustness of key negotiation especially in environments with heavy traffic load.'));
add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['psk2', 'psk-mixed', 'sae', 'sae-mixed', 'wpa2', 'wpa3', 'wpa3-mixed'] });
if (L.hasSystemFeature('hostapd', 'cli') && L.hasSystemFeature('wpasupplicant')) {
o = ss.taboption('encryption', form.Flag, 'wps_pushbutton', _('Enable WPS pushbutton, requires WPA(2)-PSK/WPA3-SAE'));
o.enabled = '1';
o.disabled = '0';
o.default = o.disabled;
o.depends('encryption', 'psk');
o.depends('encryption', 'psk2');
o.depends('encryption', 'psk-mixed');
o.depends('encryption', 'sae');
o.depends('encryption', 'sae-mixed');
}
}
}
});
};
s.handleRemove = function(section_id, ev) {
document.querySelector('.cbi-section-table-row[data-sid="%s"]'.format(section_id)).style.opacity = 0.5;
return form.TypedSection.prototype.handleRemove.apply(this, [section_id, ev]);
};
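// Opens the 'Join Network' modal with a live scan table and starts polling
// handleScanRefresh for results.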
s.handleScan = function(radioDev, ev) {
var table = E('div', { 'class': 'table' }, [
E('div', { 'class': 'tr table-titles' }, [
E('div', { 'class': 'th col-2 middle center' }, _('Signal')),
E('div', { 'class': 'th col-4 middle left' }, _('SSID')),
E('div', { 'class': 'th col-2 middle center hide-xs' }, _('Channel')),
E('div', { 'class': 'th col-2 middle left hide-xs' }, _('Mode')),
E('div', { 'class': 'th col-3 middle left hide-xs' }, _('BSSID')),
E('div', { 'class': 'th col-3 middle left' }, _('Encryption')),
E('div', { 'class': 'th cbi-section-actions right' }, ' '),
])
]);
var stop = E('button', {
'class': 'btn',
'click': L.bind(this.handleScanStartStop, this),
'style': 'display:none',
'data-state': 'stop'
}, _('Stop refresh'));
cbi_update_table(table, [], E('em', { class: 'spinning' }, _('Starting wireless scan...')));
var md = ui.showModal(_('Join Network: Wireless Scan'), [
table,
E('div', { 'class': 'right' }, [
stop,
' ',
E('button', {
'class': 'btn',
'click': L.bind(this.handleScanAbort, this)
}, _('Dismiss'))
])
]);
md.style.maxWidth = '90%';
md.style.maxHeight = 'none';
this.pollFn = L.bind(this.handleScanRefresh, this, radioDev, {}, table, stop);
L.Poll.add(this.pollFn);
L.Poll.start();
};
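// Merge fresh scan results into scanCache, keep previously seen (stale)
// entries greyed out and re-render the result table.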
s.handleScanRefresh = function(radioDev, scanCache, table, stop) {
return radioDev.getScanList().then(L.bind(function(results) {
var rows = [];
for (var i = 0; i < results.length; i++)
scanCache[results[i].bssid] = results[i];
for (var k in scanCache)
if (scanCache[k].stale)
results.push(scanCache[k]);
results.sort(function(a, b) {
var diff = (b.quality - a.quality) || (a.channel - b.channel);
if (diff)
return diff;
if (a.ssid < b.ssid)
return -1;
else if (a.ssid > b.ssid)
return 1;
if (a.bssid < b.bssid)
return -1;
else if (a.bssid > b.bssid)
return 1;
return 0;
});
for (var i = 0; i < results.length; i++) {
var res = results[i],
qv = res.quality || 0,
qm = res.quality_max || 0,
q = (qv > 0 && qm > 0) ? Math.floor((100 / qm) * qv) : 0,
s = res.stale ? 'opacity:0.5' : '';
rows.push([
E('span', { 'style': s }, render_signal_badge(q, res.signal, res.noise)),
E('span', { 'style': s }, (res.ssid != null) ? '%h'.format(res.ssid) : E('em', _('hidden'))),
E('span', { 'style': s }, '%d'.format(res.channel)),
E('span', { 'style': s }, '%h'.format(res.mode)),
E('span', { 'style': s }, '%h'.format(res.bssid)),
E('span', { 'style': s }, '%h'.format(network.formatWifiEncryption(res.encryption))),
E('div', { 'class': 'right' }, E('button', {
'class': 'cbi-button cbi-button-action important',
'click': L.bind(this.handleJoin, this, radioDev, res)
}, _('Join Network')))
]);
res.stale = true;
}
cbi_update_table(table, rows);
stop.disabled = false;
stop.style.display = '';
stop.classList.remove('spinning');
}, this));
};
s.handleScanStartStop = function(ev) {
var btn = ev.currentTarget;
if (btn.getAttribute('data-state') == 'stop') {
L.Poll.remove(this.pollFn);
btn.firstChild.data = _('Start refresh');
btn.setAttribute('data-state', 'start');
}
else {
L.Poll.add(this.pollFn);
btn.firstChild.data = _('Stop refresh');
btn.setAttribute('data-state', 'stop');
btn.classList.add('spinning');
btn.disabled = true;
}
};
s.handleScanAbort = function(ev) {
var md = L.dom.parent(ev.target, 'div[aria-modal="true"]');
if (md) {
md.style.maxWidth = '';
md.style.maxHeight = '';
}
ui.hideModal();
L.Poll.remove(this.pollFn);
this.pollFn = null;
};
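// Applies the join form: creates a new wifi-iface for the selected BSS, picks an
// encryption setting matching the scan result and attaches a DHCP interface plus
// firewall zone.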
s.handleJoinConfirm = function(radioDev, bss, form, ev) {
var nameopt = L.toArray(form.lookupOption('name', '_new_'))[0],
passopt = L.toArray(form.lookupOption('password', '_new_'))[0],
bssidopt = L.toArray(form.lookupOption('bssid', '_new_'))[0],
zoneopt = L.toArray(form.lookupOption('zone', '_new_'))[0],
replopt = L.toArray(form.lookupOption('replace', '_new_'))[0],
nameval = (nameopt && nameopt.isValid('_new_')) ? nameopt.formvalue('_new_') : null,
passval = (passopt && passopt.isValid('_new_')) ? passopt.formvalue('_new_') : null,
bssidval = (bssidopt && bssidopt.isValid('_new_')) ? bssidopt.formvalue('_new_') : null,
zoneval = zoneopt ? zoneopt.formvalue('_new_') : null,
enc = L.isObject(bss.encryption) ? bss.encryption : null,
is_wep = (enc && Array.isArray(enc.wep)),
is_psk = (enc && Array.isArray(enc.wpa) && L.toArray(enc.authentication).filter(function(a) { return a == 'psk' }).length > 0),
is_sae = (enc && Array.isArray(enc.wpa) && L.toArray(enc.authentication).filter(function(a) { return a == 'sae' }).length > 0);
if (nameval == null || (passopt && passval == null))
return;
var section_id = null;
return this.map.save(function() {
var wifi_sections = uci.sections('wireless', 'wifi-iface');
if (replopt.formvalue('_new_') == '1') {
for (var i = 0; i < wifi_sections.length; i++)
if (wifi_sections[i].device == radioDev.getName())
uci.remove('wireless', wifi_sections[i]['.name']);
}
if (uci.get('wireless', radioDev.getName(), 'disabled') == '1') {
for (var i = 0; i < wifi_sections.length; i++)
if (wifi_sections[i].device == radioDev.getName())
uci.set('wireless', wifi_sections[i]['.name'], 'disabled', '1');
uci.unset('wireless', radioDev.getName(), 'disabled');
}
section_id = next_free_sid(wifi_sections.length);
uci.add('wireless', 'wifi-iface', section_id);
uci.set('wireless', section_id, 'device', radioDev.getName());
uci.set('wireless', section_id, 'mode', (bss.mode == 'Ad-Hoc') ? 'adhoc' : 'sta');
uci.set('wireless', section_id, 'network', nameval);
if (bss.ssid != null) {
uci.set('wireless', section_id, 'ssid', bss.ssid);
if (bssidval == '1')
uci.set('wireless', section_id, 'bssid', bss.bssid);
}
else if (bss.bssid != null) {
uci.set('wireless', section_id, 'bssid', bss.bssid);
}
if (is_sae) {
uci.set('wireless', section_id, 'encryption', 'sae');
uci.set('wireless', section_id, 'key', passval);
}
else if (is_psk) {
for (var i = enc.wpa.length - 1; i >= 0; i--) {
if (enc.wpa[i] == 2) {
uci.set('wireless', section_id, 'encryption', 'psk2');
break;
}
else if (enc.wpa[i] == 1) {
uci.set('wireless', section_id, 'encryption', 'psk');
break;
}
}
uci.set('wireless', section_id, 'key', passval);
}
else if (is_wep) {
uci.set('wireless', section_id, 'encryption', 'wep-open');
uci.set('wireless', section_id, 'key', '1');
uci.set('wireless', section_id, 'key1', passval);
}
else {
uci.set('wireless', section_id, 'encryption', 'none');
}
return network.addNetwork(nameval, { proto: 'dhcp' }).then(function(net) {
firewall.deleteNetwork(net.getName());
var zonePromise = zoneval
? firewall.getZone(zoneval).then(function(zone) { return zone || firewall.addZone(zoneval) })
: Promise.resolve();
return zonePromise.then(function(zone) {
if (zone)
zone.addNetwork(net.getName());
});
});
}).then(L.bind(function() {
return this.renderMoreOptionsModal(section_id);
}, this));
};
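// Renders the join dialog asking for network name, passphrase, optional BSSID lock
// and firewall zone before delegating to handleJoinConfirm.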
s.handleJoin = function(radioDev, bss, ev) {
this.handleScanAbort(ev);
var m2 = new form.Map('wireless'),
s2 = m2.section(form.NamedSection, '_new_'),
enc = L.isObject(bss.encryption) ? bss.encryption : null,
is_wep = (enc && Array.isArray(enc.wep)),
is_psk = (enc && Array.isArray(enc.wpa) && L.toArray(enc.authentication).filter(function(a) { return a == 'psk' || a == 'sae' }).length > 0),
replace, passphrase, name, bssid, zone;
var nameUsed = function(name) {
var s = uci.get('network', name);
if (s != null && s['.type'] != 'interface')
return true;
var net = (s != null) ? network.instantiateNetwork(name) : null;
return (net != null && !net.isEmpty());
};
s2.render = function() {
return Promise.all([
{},
this.renderUCISection('_new_')
]).then(this.renderContents.bind(this));
};
replace = s2.option(form.Flag, 'replace', _('Replace wireless configuration'), _('Check this option to delete the existing networks from this radio.'));
name = s2.option(form.Value, 'name', _('Name of the new network'), _('The allowed characters are: <code>A-Z</code>, <code>a-z</code>, <code>0-9</code> and <code>_</code>'));
name.datatype = 'uciname';
name.default = 'wwan';
name.rmempty = false;
name.validate = function(section_id, value) {
if (nameUsed(value))
return _('The network name is already used');
return true;
};
for (var i = 2; nameUsed(name.default); i++)
name.default = 'wwan%d'.format(i);
if (is_wep || is_psk) {
passphrase = s2.option(form.Value, 'password', is_wep ? _('WEP passphrase') : _('WPA passphrase'), _('Specify the secret encryption key here.'));
passphrase.datatype = is_wep ? 'wepkey' : 'wpakey';
passphrase.password = true;
passphrase.rmempty = false;
}
if (bss.ssid != null) {
bssid = s2.option(form.Flag, 'bssid', _('Lock to BSSID'), _('Instead of joining any network with a matching SSID, only connect to the BSSID <code>%h</code>.').format(bss.bssid));
bssid.default = '0';
}
zone = s2.option(widgets.ZoneSelect, 'zone', _('Create / Assign firewall-zone'), _('Choose the firewall zone you want to assign to this interface. Select <em>unspecified</em> to remove the interface from the associated zone or fill out the <em>create</em> field to define a new zone and attach the interface to it.'));
zone.default = 'wan';
return m2.render().then(L.bind(function(nodes) {
ui.showModal(_('Joining Network: %q').replace(/%q/, '"%h"'.format(bss.ssid)), [
nodes,
E('div', { 'class': 'right' }, [
E('button', {
'class': 'btn',
'click': ui.hideModal
}, _('Cancel')), ' ',
E('button', {
'class': 'cbi-button cbi-button-positive important',
'click': ui.createHandlerFn(this, 'handleJoinConfirm', radioDev, bss, m2)
}, _('Submit'))
])
], 'cbi-modal').querySelector('[id="%s"] input[class][type]'.format((passphrase || name).cbid('_new_'))).focus();
}, this));
};
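// Adds a new wifi-iface section with default AP settings (mode 'ap', SSID 'OpenWrt',
// no encryption) on the selected radio and opens the edit modal.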
s.handleAdd = function(radioDev, ev) {
var section_id = next_free_sid(uci.sections('wireless', 'wifi-iface').length);
uci.unset('wireless', radioDev.getName(), 'disabled');
uci.add('wireless', 'wifi-iface', section_id);
uci.set('wireless', section_id, 'device', radioDev.getName());
uci.set('wireless', section_id, 'mode', 'ap');
uci.set('wireless', section_id, 'ssid', 'OpenWrt');
uci.set('wireless', section_id, 'encryption', 'none');
this.addedSection = section_id;
return this.renderMoreOptionsModal(section_id);
};
o = s.option(form.DummyValue, '_badge');
o.modalonly = false;
o.textvalue = function(section_id) {
var inst = this.section.lookupRadioOrNetwork(section_id),
node = E('div', { 'class': 'center' });
if (inst.getWifiNetworks)
node.appendChild(render_radio_badge(inst));
else
node.appendChild(render_network_badge(inst));
return node;
};
o = s.option(form.DummyValue, '_stat');
o.modalonly = false;
o.textvalue = function(section_id) {
var inst = this.section.lookupRadioOrNetwork(section_id);
if (inst.getWifiNetworks)
return render_radio_status(inst, this.section.wifis.filter(function(e) {
return (e.getWifiDeviceName() == inst.getName());
}));
else
return render_network_status(inst);
};
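// After rendering, poll every 5 seconds: bring sections flagged for restart back up
// via /sbin/wifi, then collect host hints, radios, networks and association lists
// and hand them to poll_status().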
return m.render().then(L.bind(function(m, nodes) {
L.Poll.add(L.bind(function() {
var section_ids = m.children[0].cfgsections(),
tasks = [ network.getHostHints(), network.getWifiDevices() ];
for (var i = 0; i < section_ids.length; i++) {
var row = nodes.querySelector('.cbi-section-table-row[data-sid="%s"]'.format(section_ids[i])),
dsc = row.querySelector('[data-name="_stat"] > div'),
btns = row.querySelectorAll('.cbi-section-actions button');
if (dsc.getAttribute('restart') == '') {
dsc.setAttribute('restart', '1');
tasks.push(fs.exec('/sbin/wifi', ['up', section_ids[i]]).catch(function(e) {
ui.addNotification(null, E('p', e.message));
}));
}
else if (dsc.getAttribute('restart') == '1') {
dsc.removeAttribute('restart');
btns[0].classList.remove('spinning');
btns[0].disabled = false;
}
}
return Promise.all(tasks)
.then(L.bind(function(hosts_radios) {
var tasks = [];
for (var i = 0; i < hosts_radios[1].length; i++)
tasks.push(hosts_radios[1][i].getWifiNetworks());
return Promise.all(tasks).then(function(data) {
hosts_radios[2] = [];
for (var i = 0; i < data.length; i++)
hosts_radios[2].push.apply(hosts_radios[2], data[i]);
return hosts_radios;
});
}, network))
.then(L.bind(function(hosts_radios_wifis) {
var tasks = [];
for (var i = 0; i < hosts_radios_wifis[2].length; i++)
tasks.push(hosts_radios_wifis[2][i].getAssocList());
return Promise.all(tasks).then(function(data) {
hosts_radios_wifis[3] = [];
for (var i = 0; i < data.length; i++) {
var wifiNetwork = hosts_radios_wifis[2][i],
radioDev = hosts_radios_wifis[1].filter(function(d) { return d.getName() == wifiNetwork.getWifiDeviceName() })[0];
for (var j = 0; j < data[i].length; j++)
hosts_radios_wifis[3].push(Object.assign({ radio: radioDev, network: wifiNetwork }, data[i][j]));
}
return hosts_radios_wifis;
});
}, network))
.then(L.bind(this.poll_status, this, nodes));
}, this), 5);
var table = E('div', { 'class': 'table assoclist', 'id': 'wifi_assoclist_table' }, [
E('div', { 'class': 'tr table-titles' }, [
E('div', { 'class': 'th nowrap' }, _('Network')),
E('div', { 'class': 'th hide-xs' }, _('MAC-Address')),
E('div', { 'class': 'th' }, _('Host')),
E('div', { 'class': 'th' }, _('Signal / Noise')),
E('div', { 'class': 'th' }, _('RX Rate / TX Rate'))
])
]);
cbi_update_table(table, [], E('em', { 'class': 'spinning' }, _('Collecting data...')));
return E([ nodes, E('h3', _('Associated Stations')), table ]);
}, this, m));
}
});
| modules/luci-mod-network/htdocs/luci-static/resources/view/network/wireless.js | 'use strict';
'require fs';
'require ui';
'require rpc';
'require uci';
'require form';
'require network';
'require firewall';
'require tools.widgets as widgets';
function count_changes(section_id) {
var changes = ui.changes.changes, n = 0;
if (!L.isObject(changes))
return n;
if (Array.isArray(changes.wireless))
for (var i = 0; i < changes.wireless.length; i++)
n += (changes.wireless[i][1] == section_id);
return n;
}
function render_radio_badge(radioDev) {
return E('span', { 'class': 'ifacebadge' }, [
E('img', { 'src': L.resource('icons/wifi%s.png').format(radioDev.isUp() ? '' : '_disabled') }),
' ',
radioDev.getName()
]);
}
function render_signal_badge(signalPercent, signalValue, noiseValue, wrap) {
var icon, title, value;
if (signalPercent < 0)
icon = L.resource('icons/signal-none.png');
else if (signalPercent == 0)
icon = L.resource('icons/signal-0.png');
else if (signalPercent < 25)
icon = L.resource('icons/signal-0-25.png');
else if (signalPercent < 50)
icon = L.resource('icons/signal-25-50.png');
else if (signalPercent < 75)
icon = L.resource('icons/signal-50-75.png');
else
icon = L.resource('icons/signal-75-100.png');
if (signalValue != null && signalValue != 0 && noiseValue != null && noiseValue != 0) {
value = '%d/%d\xa0%s'.format(signalValue, noiseValue, _('dBm'));
title = '%s: %d %s / %s: %d %s / %s %d'.format(
_('Signal'), signalValue, _('dBm'),
_('Noise'), noiseValue, _('dBm'),
_('SNR'), signalValue - noiseValue);
}
else if (signalValue != null && signalValue != 0) {
value = '%d %s'.format(signalValue, _('dBm'));
title = '%s: %d %s'.format(_('Signal'), signalValue, _('dBm'));
}
else if (signalPercent > -1) {
value = '\xa0---\xa0';
title = _('No signal');
}
else {
value = E('em', {}, E('small', {}, [ _('disabled') ]));
title = _('Interface is disabled');
}
return E('div', {
'class': wrap ? 'center' : 'ifacebadge',
'title': title,
'data-signal': signalValue,
'data-noise': noiseValue
}, [
E('img', { 'src': icon }),
E('span', {}, [
wrap ? E('br') : ' ',
value
])
]);
}
function render_network_badge(radioNet) {
return render_signal_badge(radioNet.isUp() ? radioNet.getSignalPercent() : -1, radioNet.getSignal(), radioNet.getNoise());
}
function render_radio_status(radioDev, wifiNets) {
var name = radioDev.getI18n().replace(/ Wireless Controller .+$/, ''),
node = E('div', [ E('big', {}, E('strong', {}, name)), E('div') ]),
channel, frequency, bitrate;
for (var i = 0; i < wifiNets.length; i++) {
channel = channel || wifiNets[i].getChannel();
frequency = frequency || wifiNets[i].getFrequency();
bitrate = bitrate || wifiNets[i].getBitRate();
}
if (radioDev.isUp())
L.itemlist(node.lastElementChild, [
_('Channel'), '%s (%s %s)'.format(channel || '?', frequency || '?', _('GHz')),
_('Bitrate'), '%s %s'.format(bitrate || '?', _('Mbit/s'))
], ' | ');
else
node.lastElementChild.appendChild(E('em', _('Device is not active')));
return node;
}
function render_network_status(radioNet) {
var mode = radioNet.getActiveMode(),
bssid = radioNet.getActiveBSSID(),
channel = radioNet.getChannel(),
disabled = (radioNet.get('disabled') == '1' || uci.get('wireless', radioNet.getWifiDeviceName(), 'disabled') == '1'),
is_assoc = (bssid && bssid != '00:00:00:00:00:00' && channel && mode != 'Unknown' && !disabled),
is_mesh = (radioNet.getMode() == 'mesh'),
changecount = count_changes(radioNet.getName()),
status_text = null;
if (changecount)
status_text = E('a', {
href: '#',
click: L.bind(ui.changes.displayChanges, ui.changes)
}, _('Interface has %d pending changes').format(changecount));
else if (!is_assoc)
status_text = E('em', disabled ? _('Wireless is disabled') : _('Wireless is not associated'));
return L.itemlist(E('div'), [
is_mesh ? _('Mesh ID') : _('SSID'), (is_mesh ? radioNet.getMeshID() : radioNet.getSSID()) || '?',
_('Mode'), mode,
_('BSSID'), (!changecount && is_assoc) ? bssid : null,
_('Encryption'), (!changecount && is_assoc) ? radioNet.getActiveEncryption() || _('None') : null,
null, status_text
], [ ' | ', E('br') ]);
}
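// Renders the status details shown in the edit modal, including signal, channel,
// tx power and bitrate information.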
function render_modal_status(node, radioNet) {
var mode = radioNet.getActiveMode(),
noise = radioNet.getNoise(),
bssid = radioNet.getActiveBSSID(),
channel = radioNet.getChannel(),
disabled = (radioNet.get('disabled') == '1'),
is_assoc = (bssid && bssid != '00:00:00:00:00:00' && channel && mode != 'Unknown' && !disabled);
if (node == null)
node = E('span', { 'class': 'ifacebadge large', 'data-network': radioNet.getName() }, [ E('small'), E('span') ]);
L.dom.content(node.firstElementChild, render_signal_badge(disabled ? -1 : radioNet.getSignalPercent(), radioNet.getSignal(), noise, true));
L.itemlist(node.lastElementChild, [
_('Mode'), mode,
_('SSID'), radioNet.getSSID() || '?',
_('BSSID'), is_assoc ? bssid : null,
_('Encryption'), is_assoc ? radioNet.getActiveEncryption() || _('None') : null,
_('Channel'), is_assoc ? '%d (%.3f %s)'.format(radioNet.getChannel(), radioNet.getFrequency() || 0, _('GHz')) : null,
_('Tx-Power'), is_assoc ? '%d %s'.format(radioNet.getTXPower(), _('dBm')) : null,
_('Signal'), is_assoc ? '%d %s'.format(radioNet.getSignal(), _('dBm')) : null,
_('Noise'), (is_assoc && noise != null) ? '%d %s'.format(noise, _('dBm')) : null,
_('Bitrate'), is_assoc ? '%.1f %s'.format(radioNet.getBitRate() || 0, _('Mbit/s')) : null,
_('Country'), is_assoc ? radioNet.getCountryCode() : null
], [ ' | ', E('br'), E('br'), E('br'), E('br'), E('br'), ' | ', E('br'), ' | ' ]);
if (!is_assoc)
L.dom.append(node.lastElementChild, E('em', disabled ? _('Wireless is disabled') : _('Wireless is not associated')));
return node;
}
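// Formats an association rate entry (rate, channel width, MCS/NSS, short GI)
// for the associated stations list.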
function format_wifirate(rate) {
var s = '%.1f\xa0%s, %d\xa0%s'.format(rate.rate / 1000, _('Mbit/s'), rate.mhz, _('MHz')),
ht = rate.ht, vht = rate.vht,
mhz = rate.mhz, nss = rate.nss,
mcs = rate.mcs, sgi = rate.short_gi;
if (ht || vht) {
if (vht) s += ', VHT-MCS\xa0%d'.format(mcs);
if (nss) s += ', VHT-NSS\xa0%d'.format(nss);
if (ht) s += ', MCS\xa0%s'.format(mcs);
if (sgi) s += ', ' + _('Short GI').replace(/ /g, '\xa0');
}
return s;
}
function radio_restart(id, ev) {
var row = document.querySelector('.cbi-section-table-row[data-sid="%s"]'.format(id)),
dsc = row.querySelector('[data-name="_stat"] > div'),
btn = row.querySelector('.cbi-section-actions button');
btn.blur();
btn.classList.add('spinning');
btn.disabled = true;
dsc.setAttribute('restart', '');
L.dom.content(dsc, E('em', _('Device is restarting…')));
}
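// Toggles the 'disabled' state of a wifi-iface; if all networks on the radio end up
// disabled, the radio itself is disabled as well.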
function network_updown(id, map, ev) {
var radio = uci.get('wireless', id, 'device'),
disabled = (uci.get('wireless', id, 'disabled') == '1') ||
(uci.get('wireless', radio, 'disabled') == '1');
if (disabled) {
uci.unset('wireless', id, 'disabled');
uci.unset('wireless', radio, 'disabled');
}
else {
uci.set('wireless', id, 'disabled', '1');
var all_networks_disabled = true,
wifi_ifaces = uci.sections('wireless', 'wifi-iface');
for (var i = 0; i < wifi_ifaces.length; i++) {
if (wifi_ifaces[i].device == radio && wifi_ifaces[i].disabled != '1') {
all_networks_disabled = false;
break;
}
}
if (all_networks_disabled)
uci.set('wireless', radio, 'disabled', '1');
}
return map.save().then(function() {
ui.changes.apply()
});
}
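// Returns the first unused 'wifinetN' section id, starting at the given offset.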
function next_free_sid(offset) {
var sid = 'wifinet' + offset;
while (uci.get('wireless', sid))
sid = 'wifinet' + (++offset);
return sid;
}
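// Composite widget combining mode, band, channel and width selectors; maps onto the
// UCI 'htmode', 'hwmode' and 'channel' options.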
var CBIWifiFrequencyValue = form.Value.extend({
callFrequencyList: rpc.declare({
object: 'iwinfo',
method: 'freqlist',
params: [ 'device' ],
expect: { results: [] }
}),
load: function(section_id) {
return Promise.all([
network.getWifiDevice(section_id),
this.callFrequencyList(section_id)
]).then(L.bind(function(data) {
this.channels = {
'11g': L.hasSystemFeature('hostapd', 'acs') ? [ 'auto', 'auto', true ] : [],
'11a': L.hasSystemFeature('hostapd', 'acs') ? [ 'auto', 'auto', true ] : []
};
for (var i = 0; i < data[1].length; i++)
this.channels[(data[1][i].mhz > 2484) ? '11a' : '11g'].push(
data[1][i].channel,
'%d (%d MHz)'.format(data[1][i].channel, data[1][i].mhz),
!data[1][i].restricted
);
var hwmodelist = L.toArray(data[0] ? data[0].getHWModes() : null)
.reduce(function(o, v) { o[v] = true; return o }, {});
this.modes = [
'', 'Legacy', true,
'n', 'N', hwmodelist.n,
'ac', 'AC', hwmodelist.ac
];
var htmodelist = L.toArray(data[0] ? data[0].getHTModes() : null)
.reduce(function(o, v) { o[v] = true; return o }, {});
this.htmodes = {
'': [ '', '-', true ],
'n': [
'HT20', '20 MHz', htmodelist.HT20,
'HT40', '40 MHz', htmodelist.HT40
],
'ac': [
'VHT20', '20 MHz', htmodelist.VHT20,
'VHT40', '40 MHz', htmodelist.VHT40,
'VHT80', '80 MHz', htmodelist.VHT80,
'VHT160', '160 MHz', htmodelist.VHT160
]
};
this.bands = {
'': [
'11g', '2.4 GHz', this.channels['11g'].length > 3,
'11a', '5 GHz', this.channels['11a'].length > 3
],
'n': [
'11g', '2.4 GHz', this.channels['11g'].length > 3,
'11a', '5 GHz', this.channels['11a'].length > 3
],
'ac': [
'11a', '5 GHz', true
]
};
}, this));
},
setValues: function(sel, vals) {
if (sel.vals)
sel.vals.selected = sel.selectedIndex;
while (sel.options[0])
sel.remove(0);
for (var i = 0; vals && i < vals.length; i += 3)
if (vals[i+2])
sel.add(E('option', { value: vals[i+0] }, [ vals[i+1] ]));
if (vals && !isNaN(vals.selected))
sel.selectedIndex = vals.selected;
sel.parentNode.style.display = (sel.options.length <= 1) ? 'none' : '';
sel.vals = vals;
},
toggleWifiMode: function(elem) {
this.toggleWifiHTMode(elem);
this.toggleWifiBand(elem);
},
toggleWifiHTMode: function(elem) {
var mode = elem.querySelector('.mode');
var bwdt = elem.querySelector('.htmode');
this.setValues(bwdt, this.htmodes[mode.value]);
},
toggleWifiBand: function(elem) {
var mode = elem.querySelector('.mode');
var band = elem.querySelector('.band');
this.setValues(band, this.bands[mode.value]);
this.toggleWifiChannel(elem);
},
toggleWifiChannel: function(elem) {
var band = elem.querySelector('.band');
var chan = elem.querySelector('.channel');
this.setValues(chan, this.channels[band.value]);
},
setInitialValues: function(section_id, elem) {
var mode = elem.querySelector('.mode'),
band = elem.querySelector('.band'),
chan = elem.querySelector('.channel'),
bwdt = elem.querySelector('.htmode'),
htval = uci.get('wireless', section_id, 'htmode'),
hwval = uci.get('wireless', section_id, 'hwmode'),
chval = uci.get('wireless', section_id, 'channel');
this.setValues(mode, this.modes);
if (/VHT20|VHT40|VHT80|VHT160/.test(htval))
mode.value = 'ac';
else if (/HT20|HT40/.test(htval))
mode.value = 'n';
else
mode.value = '';
this.toggleWifiMode(elem);
if (/a/.test(hwval))
band.value = '11a';
else
band.value = '11g';
this.toggleWifiBand(elem);
bwdt.value = htval;
chan.value = chval;
return elem;
},
renderWidget: function(section_id, option_index, cfgvalue) {
var elem = E('div');
L.dom.content(elem, [
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Mode'), E('br'),
E('select', {
'class': 'mode',
'style': 'width:auto',
'change': L.bind(this.toggleWifiMode, this, elem)
})
]),
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Band'), E('br'),
E('select', {
'class': 'band',
'style': 'width:auto',
'change': L.bind(this.toggleWifiBand, this, elem)
})
]),
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Channel'), E('br'),
E('select', {
'class': 'channel',
'style': 'width:auto'
})
]),
E('label', { 'style': 'float:left; margin-right:3px' }, [
_('Width'), E('br'),
E('select', {
'class': 'htmode',
'style': 'width:auto'
})
]),
E('br', { 'style': 'clear:left' })
]);
return this.setInitialValues(section_id, elem);
},
cfgvalue: function(section_id) {
return [
uci.get('wireless', section_id, 'htmode'),
uci.get('wireless', section_id, 'hwmode'),
uci.get('wireless', section_id, 'channel')
];
},
formvalue: function(section_id) {
var node = this.map.findElement('data-field', this.cbid(section_id));
return [
node.querySelector('.htmode').value,
node.querySelector('.band').value,
node.querySelector('.channel').value
];
},
write: function(section_id, value) {
uci.set('wireless', section_id, 'htmode', value[0] || null);
uci.set('wireless', section_id, 'hwmode', value[1]);
uci.set('wireless', section_id, 'channel', value[2]);
}
});
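// Transmit power selector populated from iwinfo's txpowerlist; also displays the
// currently used power and any offset reported by the driver.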
var CBIWifiTxPowerValue = form.ListValue.extend({
callTxPowerList: rpc.declare({
object: 'iwinfo',
method: 'txpowerlist',
params: [ 'device' ],
expect: { results: [] }
}),
load: function(section_id) {
return this.callTxPowerList(section_id).then(L.bind(function(pwrlist) {
this.powerval = this.wifiNetwork ? this.wifiNetwork.getTXPower() : null;
this.poweroff = this.wifiNetwork ? this.wifiNetwork.getTXPowerOffset() : null;
this.value('', _('driver default'));
for (var i = 0; i < pwrlist.length; i++)
this.value(pwrlist[i].dbm, '%d dBm (%d mW)'.format(pwrlist[i].dbm, pwrlist[i].mw));
return form.ListValue.prototype.load.apply(this, [section_id]);
}, this));
},
renderWidget: function(section_id, option_index, cfgvalue) {
var widget = form.ListValue.prototype.renderWidget.apply(this, [section_id, option_index, cfgvalue]);
widget.firstElementChild.style.width = 'auto';
L.dom.append(widget, E('span', [
' - ', _('Current power'), ': ',
E('span', [ this.powerval != null ? '%d dBm'.format(this.powerval) : E('em', _('unknown')) ]),
this.poweroff ? ' + %d dB offset = %s dBm'.format(this.poweroff, this.powerval != null ? this.powerval + this.poweroff : '?') : ''
]));
return widget;
}
});
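// Country code selector populated from iwinfo's countrylist, falling back to a
// free-form text value if no list is available.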
var CBIWifiCountryValue = form.Value.extend({
callCountryList: rpc.declare({
object: 'iwinfo',
method: 'countrylist',
params: [ 'device' ],
expect: { results: [] }
}),
load: function(section_id) {
return this.callCountryList(section_id).then(L.bind(function(countrylist) {
if (Array.isArray(countrylist) && countrylist.length > 0) {
this.value('', _('driver default'));
for (var i = 0; i < countrylist.length; i++)
this.value(countrylist[i].iso3166, '%s - %s'.format(countrylist[i].iso3166, countrylist[i].country));
}
return form.Value.prototype.load.apply(this, [section_id]);
}, this));
},
validate: function(section_id, formvalue) {
if (formvalue != null && formvalue != '' && !/^[A-Z0-9][A-Z0-9]$/.test(formvalue))
return _('Use ISO/IEC 3166 alpha2 country codes.');
return true;
},
renderWidget: function(section_id, option_index, cfgvalue) {
var typeClass = (this.keylist && this.keylist.length) ? form.ListValue : form.Value;
return typeClass.prototype.renderWidget.apply(this, [section_id, option_index, cfgvalue]);
}
});
return L.view.extend({
poll_status: function(map, data) {
var rows = map.querySelectorAll('.cbi-section-table-row[data-sid]');
for (var i = 0; i < rows.length; i++) {
var section_id = rows[i].getAttribute('data-sid'),
radioDev = data[1].filter(function(d) { return d.getName() == section_id })[0],
radioNet = data[2].filter(function(n) { return n.getName() == section_id })[0],
badge = rows[i].querySelector('[data-name="_badge"] > div'),
stat = rows[i].querySelector('[data-name="_stat"]'),
btns = rows[i].querySelectorAll('.cbi-section-actions button'),
busy = btns[0].classList.contains('spinning') || btns[1].classList.contains('spinning') || btns[2].classList.contains('spinning');
if (radioDev) {
L.dom.content(badge, render_radio_badge(radioDev));
L.dom.content(stat, render_radio_status(radioDev, data[2].filter(function(n) { return n.getWifiDeviceName() == radioDev.getName() })));
}
else {
L.dom.content(badge, render_network_badge(radioNet));
L.dom.content(stat, render_network_status(radioNet));
}
if (stat.hasAttribute('restart'))
L.dom.content(stat, E('em', _('Device is restarting…')));
btns[0].disabled = busy;
btns[1].disabled = busy;
btns[2].disabled = busy;
}
var table = document.querySelector('#wifi_assoclist_table'),
hosts = data[0],
trows = [];
for (var i = 0; i < data[3].length; i++) {
var bss = data[3][i],
name = hosts.getHostnameByMACAddr(bss.mac),
ipv4 = hosts.getIPAddrByMACAddr(bss.mac),
ipv6 = hosts.getIP6AddrByMACAddr(bss.mac);
var hint;
if (name && ipv4 && ipv6)
hint = '%s <span class="hide-xs">(%s, %s)</span>'.format(name, ipv4, ipv6);
else if (name && (ipv4 || ipv6))
hint = '%s <span class="hide-xs">(%s)</span>'.format(name, ipv4 || ipv6);
else
hint = name || ipv4 || ipv6 || '?';
var row = [
E('span', {
'class': 'ifacebadge',
'data-ifname': bss.network.getIfname(),
'data-ssid': bss.network.getSSID()
}, [
E('img', {
'src': L.resource('icons/wifi%s.png').format(bss.network.isUp() ? '' : '_disabled'),
'title': bss.radio.getI18n()
}),
E('span', [
' %s '.format(bss.network.getShortName()),
E('small', '(%s)'.format(bss.network.getIfname()))
])
]),
bss.mac,
hint,
render_signal_badge(Math.min((bss.signal + 110) / 70 * 100, 100), bss.signal, bss.noise),
E('span', {}, [
E('span', format_wifirate(bss.rx)),
E('br'),
E('span', format_wifirate(bss.tx))
])
];
if (bss.network.isClientDisconnectSupported()) {
if (table.firstElementChild.childNodes.length < 6)
table.firstElementChild.appendChild(E('div', { 'class': 'th cbi-section-actions'}));
row.push(E('button', {
'class': 'cbi-button cbi-button-remove',
'click': L.bind(function(net, mac, ev) {
L.dom.parent(ev.currentTarget, '.tr').style.opacity = 0.5;
ev.currentTarget.classList.add('spinning');
ev.currentTarget.disabled = true;
ev.currentTarget.blur();
net.disconnectClient(mac, true, 5, 60000);
}, this, bss.network, bss.mac)
}, [ _('Disconnect') ]));
}
else {
row.push('-');
}
trows.push(row);
}
cbi_update_table(table, trows, E('em', _('No information available')));
var stat = document.querySelector('.cbi-modal [data-name="_wifistat_modal"] .ifacebadge.large');
if (stat)
render_modal_status(stat, data[2].filter(function(n) { return n.getName() == stat.getAttribute('data-network') })[0]);
return network.flushCache();
},
load: function() {
return Promise.all([
uci.changes(),
uci.load('wireless')
]);
},
checkAnonymousSections: function() {
var wifiIfaces = uci.sections('wireless', 'wifi-iface');
for (var i = 0; i < wifiIfaces.length; i++)
if (wifiIfaces[i]['.anonymous'])
return true;
return false;
},
callUciRename: rpc.declare({
object: 'uci',
method: 'rename',
params: [ 'config', 'section', 'name' ]
}),
render: function() {
if (this.checkAnonymousSections())
return this.renderMigration();
else
return this.renderOverview();
},
handleMigration: function(ev) {
var wifiIfaces = uci.sections('wireless', 'wifi-iface'),
id_offset = 0,
tasks = [];
for (var i = 0; i < wifiIfaces.length; i++) {
if (!wifiIfaces[i]['.anonymous'])
continue;
var new_name = next_free_sid(id_offset);
tasks.push(this.callUciRename('wireless', wifiIfaces[i]['.name'], new_name));
id_offset = +new_name.substring(7) + 1;
}
return Promise.all(tasks)
.then(L.bind(ui.changes.init, ui.changes))
.then(L.bind(ui.changes.apply, ui.changes));
},
renderMigration: function() {
ui.showModal(_('Wireless configuration migration'), [
E('p', _('The existing wireless configuration needs to be changed for LuCI to function properly.')),
E('p', _('Upon pressing "Continue", anonymous "wifi-iface" sections will be assigned a name of the form <em>wifinet#</em> and the network will be restarted to apply the updated configuration.')),
E('div', { 'class': 'right' },
E('button', {
'class': 'btn cbi-button-action important',
'click': ui.createHandlerFn(this, 'handleMigration')
}, _('Continue')))
]);
},
renderOverview: function() {
var m, s, o;
m = new form.Map('wireless');
m.chain('network');
m.chain('firewall');
s = m.section(form.GridSection, 'wifi-device', _('Wireless Overview'));
s.anonymous = true;
s.addremove = false;
s.load = function() {
return network.getWifiDevices().then(L.bind(function(radios) {
this.radios = radios.sort(function(a, b) {
return (a.getName() > b.getName()) ? 1 : ((a.getName() < b.getName()) ? -1 : 0);
});
var tasks = [];
for (var i = 0; i < radios.length; i++)
tasks.push(radios[i].getWifiNetworks());
return Promise.all(tasks);
}, this)).then(L.bind(function(data) {
this.wifis = [];
for (var i = 0; i < data.length; i++)
this.wifis.push.apply(this.wifis, data[i]);
}, this));
};
s.cfgsections = function() {
var rv = [];
for (var i = 0; i < this.radios.length; i++) {
rv.push(this.radios[i].getName());
for (var j = 0; j < this.wifis.length; j++)
if (this.wifis[j].getWifiDeviceName() == this.radios[i].getName())
rv.push(this.wifis[j].getName());
}
return rv;
};
s.modaltitle = function(section_id) {
var radioNet = this.wifis.filter(function(w) { return w.getName() == section_id})[0];
return radioNet ? radioNet.getI18n() : _('Edit wireless network');
};
s.lookupRadioOrNetwork = function(section_id) {
var radioDev = this.radios.filter(function(r) { return r.getName() == section_id })[0];
if (radioDev)
return radioDev;
var radioNet = this.wifis.filter(function(w) { return w.getName() == section_id })[0];
if (radioNet)
return radioNet;
return null;
};
s.renderRowActions = function(section_id) {
var inst = this.lookupRadioOrNetwork(section_id), btns;
if (inst.getWifiNetworks) {
btns = [
E('button', {
'class': 'cbi-button cbi-button-neutral',
'title': _('Restart radio interface'),
'click': ui.createHandlerFn(this, radio_restart, section_id)
}, _('Restart')),
E('button', {
'class': 'cbi-button cbi-button-action important',
'title': _('Find and join network'),
'click': ui.createHandlerFn(this, 'handleScan', inst)
}, _('Scan')),
E('button', {
'class': 'cbi-button cbi-button-add',
'title': _('Provide new network'),
'click': ui.createHandlerFn(this, 'handleAdd', inst)
}, _('Add'))
];
}
else {
var isDisabled = (inst.get('disabled') == '1' ||
uci.get('wireless', inst.getWifiDeviceName(), 'disabled') == '1');
btns = [
E('button', {
'class': 'cbi-button cbi-button-neutral enable-disable',
'title': isDisabled ? _('Enable this network') : _('Disable this network'),
'click': ui.createHandlerFn(this, network_updown, section_id, this.map)
}, isDisabled ? _('Enable') : _('Disable')),
E('button', {
'class': 'cbi-button cbi-button-action important',
'title': _('Edit this network'),
'click': ui.createHandlerFn(this, 'renderMoreOptionsModal', section_id)
}, _('Edit')),
E('button', {
'class': 'cbi-button cbi-button-negative remove',
'title': _('Delete this network'),
'click': ui.createHandlerFn(this, 'handleRemove', section_id)
}, _('Remove'))
];
}
return E('div', { 'class': 'td middle cbi-section-actions' }, E('div', btns));
};
s.addModalOptions = function(s) {
return network.getWifiNetwork(s.section).then(function(radioNet) {
var hwtype = uci.get('wireless', radioNet.getWifiDeviceName(), 'type');
var o, ss;
o = s.option(form.SectionValue, '_device', form.NamedSection, radioNet.getWifiDeviceName(), 'wifi-device', _('Device Configuration'));
o.modalonly = true;
ss = o.subsection;
ss.tab('general', _('General Setup'));
ss.tab('advanced', _('Advanced Settings'));
var isDisabled = (radioNet.get('disabled') == '1' ||
uci.get('wireless', radioNet.getWifiDeviceName(), 'disabled') == '1');
o = ss.taboption('general', form.DummyValue, '_wifistat_modal', _('Status'));
o.cfgvalue = L.bind(function(radioNet) {
return render_modal_status(null, radioNet);
}, this, radioNet);
o.write = function() {};
o = ss.taboption('general', form.Button, '_toggle', isDisabled ? _('Wireless network is disabled') : _('Wireless network is enabled'));
o.inputstyle = isDisabled ? 'apply' : 'reset';
o.inputtitle = isDisabled ? _('Enable') : _('Disable');
o.onclick = ui.createHandlerFn(s, network_updown, s.section, s.map);
o = ss.taboption('general', CBIWifiFrequencyValue, '_freq', '<br />' + _('Operating frequency'));
o.ucisection = s.section;
if (hwtype == 'mac80211') {
o = ss.taboption('general', CBIWifiTxPowerValue, 'txpower', _('Maximum transmit power'), _('Specifies the maximum transmit power the wireless radio may use. Depending on regulatory requirements and wireless usage, the actual transmit power may be reduced by the driver.'));
o.wifiNetwork = radioNet;
o = ss.taboption('advanced', CBIWifiCountryValue, 'country', _('Country Code'));
o.wifiNetwork = radioNet;
o = ss.taboption('advanced', form.Flag, 'legacy_rates', _('Allow legacy 802.11b rates'));
o.default = o.enabled;
o = ss.taboption('advanced', form.Value, 'distance', _('Distance Optimization'), _('Distance to farthest network member in meters.'));
o.datatype = 'range(0,114750)';
o.placeholder = 'auto';
o = ss.taboption('advanced', form.Value, 'frag', _('Fragmentation Threshold'));
o.datatype = 'min(256)';
o.placeholder = _('off');
o = ss.taboption('advanced', form.Value, 'rts', _('RTS/CTS Threshold'));
o.datatype = 'uinteger';
o.placeholder = _('off');
o = ss.taboption('advanced', form.Flag, 'noscan', _('Force 40MHz mode'), _('Always use 40MHz channels even if the secondary channel overlaps. Using this option does not comply with IEEE 802.11n-2009!'));
o.rmempty = true;
o = ss.taboption('advanced', form.Value, 'beacon_int', _('Beacon Interval'));
o.datatype = 'range(15,65535)';
o.placeholder = 100;
o.rmempty = true;
}
o = s.option(form.SectionValue, '_device', form.NamedSection, radioNet.getName(), 'wifi-iface', _('Interface Configuration'));
o.modalonly = true;
ss = o.subsection;
ss.tab('general', _('General Setup'));
ss.tab('encryption', _('Wireless Security'));
ss.tab('macfilter', _('MAC-Filter'));
ss.tab('advanced', _('Advanced Settings'));
o = ss.taboption('general', form.ListValue, 'mode', _('Mode'));
o.value('ap', _('Access Point'));
o.value('sta', _('Client'));
o.value('adhoc', _('Ad-Hoc'));
o = ss.taboption('general', form.Value, 'mesh_id', _('Mesh Id'));
o.depends('mode', 'mesh');
o = ss.taboption('advanced', form.Flag, 'mesh_fwding', _('Forward mesh peer traffic'));
o.rmempty = false;
o.default = '1';
o.depends('mode', 'mesh');
o = ss.taboption('advanced', form.Value, 'mesh_rssi_threshold', _('RSSI threshold for joining'), _('0 = not using RSSI threshold, 1 = do not change driver default'));
o.rmempty = false;
o.default = '0';
o.datatype = 'range(-255,1)';
o.depends('mode', 'mesh');
o = ss.taboption('general', form.Value, 'ssid', _('<abbr title="Extended Service Set Identifier">ESSID</abbr>'));
o.datatype = 'maxlength(32)';
o.depends('mode', 'ap');
o.depends('mode', 'sta');
o.depends('mode', 'adhoc');
o.depends('mode', 'ahdemo');
o.depends('mode', 'monitor');
o.depends('mode', 'ap-wds');
o.depends('mode', 'sta-wds');
o.depends('mode', 'wds');
o = ss.taboption('general', form.Value, 'bssid', _('<abbr title="Basic Service Set Identifier">BSSID</abbr>'));
o.datatype = 'macaddr';
o = ss.taboption('general', widgets.NetworkSelect, 'network', _('Network'), _('Choose the network(s) you want to attach to this wireless interface or fill out the <em>create</em> field to define a new network.'));
o.rmempty = true;
o.multiple = true;
o.novirtual = true;
o.write = function(section_id, value) {
return network.getDevice(section_id).then(L.bind(function(dev) {
var old_networks = dev.getNetworks().reduce(function(o, v) { o[v.getName()] = v; return o }, {}),
new_networks = {},
values = L.toArray(value),
tasks = [];
for (var i = 0; i < values.length; i++) {
new_networks[values[i]] = true;
if (old_networks[values[i]])
continue;
tasks.push(network.getNetwork(values[i]).then(L.bind(function(name, net) {
return net || network.addNetwork(name, { proto: 'none' });
}, this, values[i])).then(L.bind(function(dev, net) {
if (net) {
if (!net.isEmpty())
net.set('type', 'bridge');
net.addDevice(dev);
}
}, this, dev)));
}
for (var name in old_networks)
if (!new_networks[name])
tasks.push(network.getNetwork(name).then(L.bind(function(dev, net) {
if (net)
net.deleteDevice(dev);
}, this, dev)));
return Promise.all(tasks);
}, this));
};
if (hwtype == 'mac80211') {
var mode = ss.children[0],
bssid = ss.children[5],
encr;
mode.value('mesh', '802.11s');
mode.value('ahdemo', _('Pseudo Ad-Hoc (ahdemo)'));
mode.value('monitor', _('Monitor'));
bssid.depends('mode', 'adhoc');
bssid.depends('mode', 'sta');
bssid.depends('mode', 'sta-wds');
o = ss.taboption('macfilter', form.ListValue, 'macfilter', _('MAC-Address Filter'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o.value('', _('disable'));
o.value('allow', _('Allow listed only'));
o.value('deny', _('Allow all except listed'));
o = ss.taboption('macfilter', form.DynamicList, 'maclist', _('MAC-List'));
o.datatype = 'macaddr';
o.depends('macfilter', 'allow');
o.depends('macfilter', 'deny');
o.load = function(section_id) {
return network.getHostHints().then(L.bind(function(hints) {
hints.getMACHints().map(L.bind(function(hint) {
this.value(hint[0], hint[1] ? '%s (%s)'.format(hint[0], hint[1]) : hint[0]);
}, this));
return form.DynamicList.prototype.load.apply(this, [section_id]);
}, this));
};
mode.value('ap-wds', '%s (%s)'.format(_('Access Point'), _('WDS')));
mode.value('sta-wds', '%s (%s)'.format(_('Client'), _('WDS')));
mode.write = function(section_id, value) {
switch (value) {
case 'ap-wds':
uci.set('wireless', section_id, 'mode', 'ap');
uci.set('wireless', section_id, 'wds', '1');
break;
case 'sta-wds':
uci.set('wireless', section_id, 'mode', 'sta');
uci.set('wireless', section_id, 'wds', '1');
break;
default:
uci.set('wireless', section_id, 'mode', value);
uci.unset('wireless', section_id, 'wds');
break;
}
};
mode.cfgvalue = function(section_id) {
var mode = uci.get('wireless', section_id, 'mode'),
wds = uci.get('wireless', section_id, 'wds');
if (mode == 'ap' && wds)
return 'ap-wds';
else if (mode == 'sta' && wds)
return 'sta-wds';
return mode;
};
o = ss.taboption('general', form.Flag, 'hidden', _('Hide <abbr title="Extended Service Set Identifier">ESSID</abbr>'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o = ss.taboption('general', form.Flag, 'wmm', _('WMM Mode'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o.default = o.enabled;
o = ss.taboption('advanced', form.Flag, 'isolate', _('Isolate Clients'), _('Prevents client-to-client communication'));
o.depends('mode', 'ap');
o.depends('mode', 'ap-wds');
o = ss.taboption('advanced', form.Value, 'ifname', _('Interface name'), _('Override default interface name'));
o.optional = true;
o.placeholder = radioNet.getIfname();
if (/^radio\d+\.network/.test(o.placeholder))
o.placeholder = '';
o = ss.taboption('advanced', form.Flag, 'short_preamble', _('Short Preamble'));
o.default = o.enabled;
o = ss.taboption('advanced', form.Value, 'dtim_period', _('DTIM Interval'), _('Delivery Traffic Indication Message Interval'));
o.optional = true;
o.placeholder = 2;
o.datatype = 'range(1,255)';
o = ss.taboption('advanced', form.Value, 'wpa_group_rekey', _('Time interval for rekeying GTK'), _('sec'));
o.optional = true;
o.placeholder = 600;
o.datatype = 'uinteger';
			o = ss.taboption('advanced', form.Flag, 'skip_inactivity_poll', _('Disable Inactivity Polling'));
o.optional = true;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Value, 'max_inactivity', _('Station inactivity limit'), _('sec'));
o.optional = true;
o.placeholder = 300;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Value, 'max_listen_interval', _('Maximum allowed Listen Interval'));
o.optional = true;
o.placeholder = 65535;
o.datatype = 'uinteger';
o = ss.taboption('advanced', form.Flag, 'disassoc_low_ack', _('Disassociate On Low Acknowledgement'), _('Allow AP mode to disconnect STAs based on low ACK condition'));
o.default = o.enabled;
}
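			// The UCI "encryption" option combines an auth mode with an optional cipher suffix;
			// cfgvalue() strips the suffix for display and write() re-appends the selected cipher.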
encr = o = ss.taboption('encryption', form.ListValue, 'encryption', _('Encryption'));
o.depends('mode', 'ap');
o.depends('mode', 'sta');
o.depends('mode', 'adhoc');
o.depends('mode', 'ahdemo');
o.depends('mode', 'ap-wds');
o.depends('mode', 'sta-wds');
o.depends('mode', 'mesh');
o.cfgvalue = function(section_id) {
var v = String(uci.get('wireless', section_id, 'encryption'));
if (v == 'wep')
return 'wep-open';
else if (v.match(/\+/))
return v.replace(/\+.+$/, '');
return v;
};
o.write = function(section_id, value) {
var e = this.section.children.filter(function(o) { return o.option == 'encryption' })[0].formvalue(section_id),
co = this.section.children.filter(function(o) { return o.option == 'cipher' })[0], c = co.formvalue(section_id);
if (value == 'wpa' || value == 'wpa2')
uci.unset('wireless', section_id, 'key');
if (co.isActive(section_id) && e && (c == 'tkip' || c == 'ccmp' || c == 'tkip+ccmp'))
e += '+' + c;
uci.set('wireless', section_id, 'encryption', e);
};
o = ss.taboption('encryption', form.ListValue, 'cipher', _('Cipher'));
o.depends('encryption', 'wpa');
o.depends('encryption', 'wpa2');
o.depends('encryption', 'psk');
o.depends('encryption', 'psk2');
o.depends('encryption', 'wpa-mixed');
o.depends('encryption', 'psk-mixed');
o.value('auto', _('auto'));
o.value('ccmp', _('Force CCMP (AES)'));
o.value('tkip', _('Force TKIP'));
o.value('tkip+ccmp', _('Force TKIP and CCMP (AES)'));
o.write = ss.children.filter(function(o) { return o.option == 'encryption' })[0].write;
o.cfgvalue = function(section_id) {
var v = String(uci.get('wireless', section_id, 'encryption'));
if (v.match(/\+/)) {
v = v.replace(/^[^+]+\+/, '');
if (v == 'aes')
v = 'ccmp';
else if (v == 'tkip+aes' || v == 'aes+tkip' || v == 'ccmp+tkip')
v = 'tkip+ccmp';
}
return v;
};
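			// Assemble the selectable encryption modes based on which hostapd / wpa_supplicant features are installed.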
var crypto_modes = [];
if (hwtype == 'mac80211') {
var has_supplicant = L.hasSystemFeature('wpasupplicant'),
has_hostapd = L.hasSystemFeature('hostapd');
// Probe EAP support
var has_ap_eap = L.hasSystemFeature('hostapd', 'eap'),
has_sta_eap = L.hasSystemFeature('wpasupplicant', 'eap');
// Probe SAE support
var has_ap_sae = L.hasSystemFeature('hostapd', 'sae'),
has_sta_sae = L.hasSystemFeature('wpasupplicant', 'sae');
// Probe OWE support
var has_ap_owe = L.hasSystemFeature('hostapd', 'owe'),
has_sta_owe = L.hasSystemFeature('wpasupplicant', 'owe');
if (has_hostapd || has_supplicant) {
crypto_modes.push(['psk2', 'WPA2-PSK', 33]);
crypto_modes.push(['psk-mixed', 'WPA-PSK/WPA2-PSK Mixed Mode', 22]);
crypto_modes.push(['psk', 'WPA-PSK', 21]);
}
else {
encr.description = _('WPA-Encryption requires wpa_supplicant (for client mode) or hostapd (for AP and ad-hoc mode) to be installed.');
}
if (has_ap_sae || has_sta_sae) {
crypto_modes.push(['sae', 'WPA3-SAE', 31]);
crypto_modes.push(['sae-mixed', 'WPA2-PSK/WPA3-SAE Mixed Mode', 30]);
}
if (has_ap_eap || has_sta_eap) {
crypto_modes.push(['wpa2', 'WPA2-EAP', 32]);
crypto_modes.push(['wpa', 'WPA-EAP', 20]);
}
if (has_ap_owe || has_sta_owe) {
crypto_modes.push(['owe', 'OWE', 1]);
}
encr.crypto_support = {
'ap': {
'wep-open': true,
'wep-shared': true,
'psk': has_hostapd || _('Requires hostapd'),
'psk2': has_hostapd || _('Requires hostapd'),
'psk-mixed': has_hostapd || _('Requires hostapd'),
'sae': has_ap_sae || _('Requires hostapd with SAE support'),
'sae-mixed': has_ap_sae || _('Requires hostapd with SAE support'),
'wpa': has_ap_eap || _('Requires hostapd with EAP support'),
'wpa2': has_ap_eap || _('Requires hostapd with EAP support'),
'owe': has_ap_owe || _('Requires hostapd with OWE support')
},
'sta': {
'wep-open': true,
'wep-shared': true,
'psk': has_supplicant || _('Requires wpa-supplicant'),
'psk2': has_supplicant || _('Requires wpa-supplicant'),
'psk-mixed': has_supplicant || _('Requires wpa-supplicant'),
'sae': has_sta_sae || _('Requires wpa-supplicant with SAE support'),
'sae-mixed': has_sta_sae || _('Requires wpa-supplicant with SAE support'),
'wpa': has_sta_eap || _('Requires wpa-supplicant with EAP support'),
'wpa2': has_sta_eap || _('Requires wpa-supplicant with EAP support'),
'owe': has_sta_owe || _('Requires wpa-supplicant with OWE support')
},
'adhoc': {
'wep-open': true,
'wep-shared': true,
'psk': has_supplicant || _('Requires wpa-supplicant'),
'psk2': has_supplicant || _('Requires wpa-supplicant'),
'psk-mixed': has_supplicant || _('Requires wpa-supplicant'),
},
'mesh': {
'sae': has_sta_sae || _('Requires wpa-supplicant with SAE support')
},
'ahdemo': {
'wep-open': true,
'wep-shared': true
},
'wds': {
'wep-open': true,
'wep-shared': true
}
};
encr.crypto_support['ap-wds'] = encr.crypto_support['ap'];
encr.crypto_support['sta-wds'] = encr.crypto_support['sta'];
encr.validate = function(section_id, value) {
var modeopt = this.section.children.filter(function(o) { return o.option == 'mode' })[0],
modeval = modeopt.formvalue(section_id),
modetitle = modeopt.vallist[modeopt.keylist.indexOf(modeval)],
enctitle = this.vallist[this.keylist.indexOf(value)];
if (value == 'none')
return true;
if (!L.isObject(this.crypto_support[modeval]) || !this.crypto_support[modeval].hasOwnProperty(value))
return _('The selected %s mode is incompatible with %s encryption').format(modetitle, enctitle);
return this.crypto_support[modeval][value];
};
}
else if (hwtype == 'broadcom') {
crypto_modes.push(['psk2', 'WPA2-PSK', 33]);
crypto_modes.push(['psk+psk2', 'WPA-PSK/WPA2-PSK Mixed Mode', 22]);
crypto_modes.push(['psk', 'WPA-PSK', 21]);
}
crypto_modes.push(['wep-open', _('WEP Open System'), 11]);
crypto_modes.push(['wep-shared', _('WEP Shared Key'), 10]);
crypto_modes.push(['none', _('No Encryption'), 0]);
crypto_modes.sort(function(a, b) { return b[2] - a[2] });
for (var i = 0; i < crypto_modes.length; i++) {
var security_level = (crypto_modes[i][2] >= 30) ? _('strong security')
: (crypto_modes[i][2] >= 20) ? _('medium security')
: (crypto_modes[i][2] >= 10) ? _('weak security') : _('open network');
encr.value(crypto_modes[i][0], '%s (%s)'.format(crypto_modes[i][1], security_level));
}
o = ss.taboption('encryption', form.Value, 'auth_server', _('Radius-Authentication-Server'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.datatype = 'host(0)';
o = ss.taboption('encryption', form.Value, 'auth_port', _('Radius-Authentication-Port'), _('Default %d').format(1812));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.datatype = 'port';
o = ss.taboption('encryption', form.Value, 'auth_secret', _('Radius-Authentication-Secret'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.password = true;
o = ss.taboption('encryption', form.Value, 'acct_server', _('Radius-Accounting-Server'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.datatype = 'host(0)';
o = ss.taboption('encryption', form.Value, 'acct_port', _('Radius-Accounting-Port'), _('Default %d').format(1813));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.datatype = 'port';
o = ss.taboption('encryption', form.Value, 'acct_secret', _('Radius-Accounting-Secret'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.password = true;
o = ss.taboption('encryption', form.Value, 'dae_client', _('DAE-Client'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.datatype = 'host(0)';
o = ss.taboption('encryption', form.Value, 'dae_port', _('DAE-Port'), _('Default %d').format(3799));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.datatype = 'port';
o = ss.taboption('encryption', form.Value, 'dae_secret', _('DAE-Secret'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.rmempty = true;
o.password = true;
o = ss.taboption('encryption', form.Value, '_wpa_key', _('Key'));
o.depends('encryption', 'psk');
o.depends('encryption', 'psk2');
o.depends('encryption', 'psk+psk2');
o.depends('encryption', 'psk-mixed');
o.depends('encryption', 'sae');
o.depends('encryption', 'sae-mixed');
o.datatype = 'wpakey';
o.rmempty = true;
o.password = true;
o.cfgvalue = function(section_id) {
var key = uci.get('wireless', section_id, 'key');
return /^[1234]$/.test(key) ? null : key;
};
o.write = function(section_id, value) {
uci.set('wireless', section_id, 'key', value);
uci.unset('wireless', section_id, 'key1');
uci.unset('wireless', section_id, 'key2');
uci.unset('wireless', section_id, 'key3');
uci.unset('wireless', section_id, 'key4');
};
o = ss.taboption('encryption', form.ListValue, '_wep_key', _('Used Key Slot'));
o.depends('encryption', 'wep-open');
o.depends('encryption', 'wep-shared');
o.value('1', _('Key #%d').format(1));
o.value('2', _('Key #%d').format(2));
o.value('3', _('Key #%d').format(3));
o.value('4', _('Key #%d').format(4));
o.cfgvalue = function(section_id) {
var slot = +uci.get('wireless', section_id, 'key');
return (slot >= 1 && slot <= 4) ? String(slot) : '';
};
o.write = function(section_id, value) {
uci.set('wireless', section_id, 'key', value);
};
for (var slot = 1; slot <= 4; slot++) {
o = ss.taboption('encryption', form.Value, 'key%d'.format(slot), _('Key #%d').format(slot));
o.depends('encryption', 'wep-open');
o.depends('encryption', 'wep-shared');
o.datatype = 'wepkey';
o.rmempty = true;
o.password = true;
o.write = function(section_id, value) {
if (value != null && (value.length == 5 || value.length == 13))
value = 's:%s'.format(value);
uci.set('wireless', section_id, this.option, value);
};
}
if (hwtype == 'mac80211') {
// Probe 802.11r support (and EAP support as a proxy for Openwrt)
var has_80211r = L.hasSystemFeature('hostapd', '11r') || L.hasSystemFeature('hostapd', 'eap');
o = ss.taboption('encryption', form.Flag, 'ieee80211r', _('802.11r Fast Transition'), _('Enables fast roaming among access points that belong to the same Mobility Domain'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
if (has_80211r) {
o.depends({ mode: 'ap', encryption: 'psk' });
o.depends({ mode: 'ap', encryption: 'psk2' });
o.depends({ mode: 'ap', encryption: 'psk-mixed' });
o.depends({ mode: 'ap', encryption: 'sae' });
o.depends({ mode: 'ap', encryption: 'sae-mixed' });
o.depends({ mode: 'ap-wds', encryption: 'psk' });
o.depends({ mode: 'ap-wds', encryption: 'psk2' });
o.depends({ mode: 'ap-wds', encryption: 'psk-mixed' });
o.depends({ mode: 'ap-wds', encryption: 'sae' });
o.depends({ mode: 'ap-wds', encryption: 'sae-mixed' });
}
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'nasid', _('NAS ID'), _('Used for two different purposes: RADIUS NAS ID and 802.11r R0KH-ID. Not needed with normal WPA(2)-PSK.'));
o.depends({ mode: 'ap', encryption: 'wpa' });
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.depends({ ieee80211r: '1' });
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'mobility_domain', _('Mobility Domain'), _('4-character hexadecimal ID'));
o.depends({ ieee80211r: '1' });
o.placeholder = '4f57';
o.datatype = 'and(hexstring,length(4))';
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'reassociation_deadline', _('Reassociation Deadline'), _('time units (TUs / 1.024 ms) [1000-65535]'));
o.depends({ ieee80211r: '1' });
o.placeholder = '1000';
o.datatype = 'range(1000,65535)';
o.rmempty = true;
o = ss.taboption('encryption', form.ListValue, 'ft_over_ds', _('FT protocol'));
o.depends({ ieee80211r: '1' });
o.value('1', _('FT over DS'));
o.value('0', _('FT over the Air'));
o.rmempty = true;
o = ss.taboption('encryption', form.Flag, 'ft_psk_generate_local', _('Generate PMK locally'), _('When using a PSK, the PMK can be automatically generated. When enabled, the R0/R1 key options below are not applied. Disable this to use the R0 and R1 key options.'));
o.depends({ ieee80211r: '1' });
o.default = o.enabled;
o.rmempty = false;
o = ss.taboption('encryption', form.Value, 'r0_key_lifetime', _('R0 Key Lifetime'), _('minutes'));
o.depends({ ieee80211r: '1' });
o.placeholder = '10000';
o.datatype = 'uinteger';
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'r1_key_holder', _('R1 Key Holder'), _('6-octet identifier as a hex string - no colons'));
o.depends({ ieee80211r: '1' });
o.placeholder = '00004f577274';
o.datatype = 'and(hexstring,length(12))';
o.rmempty = true;
o = ss.taboption('encryption', form.Flag, 'pmk_r1_push', _('PMK R1 Push'));
o.depends({ ieee80211r: '1' });
o.placeholder = '0';
o.rmempty = true;
o = ss.taboption('encryption', form.DynamicList, 'r0kh', _('External R0 Key Holder List'), _('List of R0KHs in the same Mobility Domain. <br />Format: MAC-address,NAS-Identifier,128-bit key as hex string. <br />This list is used to map R0KH-ID (NAS Identifier) to a destination MAC address when requesting PMK-R1 key from the R0KH that the STA used during the Initial Mobility Domain Association.'));
o.depends({ ieee80211r: '1' });
o.rmempty = true;
			o = ss.taboption('encryption', form.DynamicList, 'r1kh', _('External R1 Key Holder List'), _('List of R1KHs in the same Mobility Domain. <br />Format: MAC-address,R1KH-ID as 6 octets with colons,128-bit key as hex string. <br />This list is used to map R1KH-ID to a destination MAC address when sending PMK-R1 key from the R0KH. This is also the list of authorized R1KHs in the MD that can request PMK-R1 keys.'));
o.depends({ ieee80211r: '1' });
o.rmempty = true;
// End of 802.11r options
o = ss.taboption('encryption', form.ListValue, 'eap_type', _('EAP-Method'));
o.value('tls', 'TLS');
o.value('ttls', 'TTLS');
o.value('peap', 'PEAP');
o.value('fast', 'FAST');
o.depends({ mode: 'sta', encryption: 'wpa' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o = ss.taboption('encryption', form.Flag, 'ca_cert_usesystem', _('Use system certificates'), _("Validate server certificate using built-in system CA bundle,<br />requires the \"ca-bundle\" package"));
o.enabled = '1';
o.disabled = '0';
o.default = o.disabled;
o.depends({ mode: 'sta', encryption: 'wpa' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o.validate = function(section_id, value) {
if (value == '1' && !L.hasSystemFeature('cabundle')) {
return _("This option cannot be used because the ca-bundle package is not installed.");
}
return true;
};
o = ss.taboption('encryption', form.FileUpload, 'ca_cert', _('Path to CA-Certificate'));
o.depends({ mode: 'sta', encryption: 'wpa', ca_cert_usesystem: '0' });
o.depends({ mode: 'sta', encryption: 'wpa2', ca_cert_usesystem: '0' });
o.depends({ mode: 'sta-wds', encryption: 'wpa', ca_cert_usesystem: '0' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2', ca_cert_usesystem: '0' });
o = ss.taboption('encryption', form.Value, 'subject_match', _('Certificate constraint (Subject)'), _("Certificate constraint substring - e.g. /CN=wifi.mycompany.com<br />See `logread -f` during handshake for actual values"));
o.depends({ mode: 'sta', encryption: 'wpa' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o = ss.taboption('encryption', form.DynamicList, 'altsubject_match', _('Certificate constraint (SAN)'), _("Certificate constraint(s) via Subject Alternate Name values<br />(supported attributes: EMAIL, DNS, URI) - e.g. DNS:wifi.mycompany.com"));
o.depends({ mode: 'sta', encryption: 'wpa' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o = ss.taboption('encryption', form.DynamicList, 'domain_match', _('Certificate constraint (Domain)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (exact match)"));
o.depends({ mode: 'sta', encryption: 'wpa' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o = ss.taboption('encryption', form.DynamicList, 'domain_suffix_match', _('Certificate constraint (Wildcard)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (suffix match)"));
o.depends({ mode: 'sta', encryption: 'wpa' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o = ss.taboption('encryption', form.FileUpload, 'client_cert', _('Path to Client-Certificate'));
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
o = ss.taboption('encryption', form.FileUpload, 'priv_key', _('Path to Private Key'));
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
o = ss.taboption('encryption', form.Value, 'priv_key_pwd', _('Password of Private Key'));
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
o.password = true;
o = ss.taboption('encryption', form.ListValue, 'auth', _('Authentication'));
o.value('PAP', 'PAP');
o.value('CHAP', 'CHAP');
o.value('MSCHAP', 'MSCHAP');
o.value('MSCHAPV2', 'MSCHAPv2');
o.value('EAP-GTC');
o.value('EAP-MD5');
o.value('EAP-MSCHAPV2');
o.value('EAP-TLS');
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
o.validate = function(section_id, value) {
var eo = this.section.children.filter(function(o) { return o.option == 'eap_type' })[0],
ev = eo.formvalue(section_id);
if (ev != 'ttls' && (value == 'PAP' || value == 'CHAP' || value == 'MSCHAP' || value == 'MSCHAPV2'))
return _('This authentication type is not applicable to the selected EAP method.');
return true;
};
o = ss.taboption('encryption', form.Flag, 'ca_cert2_usesystem', _('Use system certificates for inner-tunnel'), _("Validate server certificate using built-in system CA bundle,<br />requires the \"ca-bundle\" package"));
o.enabled = '1';
o.disabled = '0';
o.default = o.disabled;
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o.validate = function(section_id, value) {
if (value == '1' && !L.hasSystemFeature('cabundle')) {
return _("This option cannot be used because the ca-bundle package is not installed.");
}
return true;
};
o = ss.taboption('encryption', form.FileUpload, 'ca_cert2', _('Path to inner CA-Certificate'));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa', ca_cert2_usesystem: '0' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2', ca_cert2_usesystem: '0' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa', ca_cert2_usesystem: '0' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2', ca_cert2_usesystem: '0' });
o = ss.taboption('encryption', form.Value, 'subject_match2', _('Inner certificate constraint (Subject)'), _("Certificate constraint substring - e.g. /CN=wifi.mycompany.com<br />See `logread -f` during handshake for actual values"));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o = ss.taboption('encryption', form.DynamicList, 'altsubject_match2', _('Inner certificate constraint (SAN)'), _("Certificate constraint(s) via Subject Alternate Name values<br />(supported attributes: EMAIL, DNS, URI) - e.g. DNS:wifi.mycompany.com"));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o = ss.taboption('encryption', form.DynamicList, 'domain_match2', _('Inner certificate constraint (Domain)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (exact match)"));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o = ss.taboption('encryption', form.DynamicList, 'domain_suffix_match2', _('Inner certificate constraint (Wildcard)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (suffix match)"));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o = ss.taboption('encryption', form.FileUpload, 'client_cert2', _('Path to inner Client-Certificate'));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o = ss.taboption('encryption', form.FileUpload, 'priv_key2', _('Path to inner Private Key'));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o = ss.taboption('encryption', form.Value, 'priv_key2_pwd', _('Password of inner Private Key'));
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
o.password = true;
o = ss.taboption('encryption', form.Value, 'identity', _('Identity'));
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
o = ss.taboption('encryption', form.Value, 'anonymous_identity', _('Anonymous Identity'));
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
o = ss.taboption('encryption', form.Value, 'password', _('Password'));
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
o.password = true;
if (hwtype == 'mac80211') {
// ieee802.11w options
if (L.hasSystemFeature('hostapd', '11w')) {
o = ss.taboption('encryption', form.ListValue, 'ieee80211w', _('802.11w Management Frame Protection'), _("Requires the 'full' version of wpad/hostapd and support from the wifi driver <br />(as of Jan 2019: ath9k, ath10k, mwlwifi and mt76)"));
o.value('', _('Disabled'));
o.value('1', _('Optional'));
o.value('2', _('Required'));
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.depends({ mode: 'ap', encryption: 'psk2' });
o.depends({ mode: 'ap', encryption: 'psk-mixed' });
o.depends({ mode: 'ap', encryption: 'sae' });
o.depends({ mode: 'ap', encryption: 'sae-mixed' });
o.depends({ mode: 'ap', encryption: 'owe' });
o.depends({ mode: 'ap-wds', encryption: 'psk2' });
o.depends({ mode: 'ap-wds', encryption: 'psk-mixed' });
o.depends({ mode: 'ap-wds', encryption: 'sae' });
o.depends({ mode: 'ap-wds', encryption: 'sae-mixed' });
o.depends({ mode: 'ap-wds', encryption: 'owe' });
o.depends({ mode: 'sta', encryption: 'wpa2' });
o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
o.depends({ mode: 'sta', encryption: 'psk2' });
o.depends({ mode: 'sta', encryption: 'psk-mixed' });
o.depends({ mode: 'sta', encryption: 'sae' });
o.depends({ mode: 'sta', encryption: 'sae-mixed' });
o.depends({ mode: 'sta', encryption: 'owe' });
o.depends({ mode: 'sta-wds', encryption: 'psk2' });
o.depends({ mode: 'sta-wds', encryption: 'psk-mixed' });
o.depends({ mode: 'sta-wds', encryption: 'sae' });
o.depends({ mode: 'sta-wds', encryption: 'sae-mixed' });
o.depends({ mode: 'sta-wds', encryption: 'owe' });
o.defaults = {
'2': [{ encryption: 'sae' }, { encryption: 'owe' }],
'1': [{ encryption: 'sae-mixed'}],
'': []
};
o = ss.taboption('encryption', form.Value, 'ieee80211w_max_timeout', _('802.11w maximum timeout'), _('802.11w Association SA Query maximum timeout'));
o.depends('ieee80211w', '1');
o.depends('ieee80211w', '2');
o.datatype = 'uinteger';
o.placeholder = '1000';
o.rmempty = true;
o = ss.taboption('encryption', form.Value, 'ieee80211w_retry_timeout', _('802.11w retry timeout'), _('802.11w Association SA Query retry timeout'));
o.depends('ieee80211w', '1');
o.depends('ieee80211w', '2');
o.datatype = 'uinteger';
o.placeholder = '201';
o.rmempty = true;
			}
o = ss.taboption('encryption', form.Flag, 'wpa_disable_eapol_key_retries', _('Enable key reinstallation (KRACK) countermeasures'), _('Complicates key reinstallation attacks on the client side by disabling retransmission of EAPOL-Key frames that are used to install keys. This workaround might cause interoperability issues and reduced robustness of key negotiation especially in environments with heavy traffic load.'));
o.depends({ mode: 'ap', encryption: 'wpa2' });
o.depends({ mode: 'ap', encryption: 'psk2' });
o.depends({ mode: 'ap', encryption: 'psk-mixed' });
o.depends({ mode: 'ap', encryption: 'sae' });
o.depends({ mode: 'ap', encryption: 'sae-mixed' });
o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
o.depends({ mode: 'ap-wds', encryption: 'psk2' });
o.depends({ mode: 'ap-wds', encryption: 'psk-mixed' });
o.depends({ mode: 'ap-wds', encryption: 'sae' });
o.depends({ mode: 'ap-wds', encryption: 'sae-mixed' });
if (L.hasSystemFeature('hostapd', 'cli') && L.hasSystemFeature('wpasupplicant')) {
			o = ss.taboption('encryption', form.Flag, 'wps_pushbutton', _('Enable WPS pushbutton, requires WPA(2)-PSK/WPA3-SAE'));
o.enabled = '1';
o.disabled = '0';
o.default = o.disabled;
o.depends('encryption', 'psk');
o.depends('encryption', 'psk2');
o.depends('encryption', 'psk-mixed');
o.depends('encryption', 'sae');
o.depends('encryption', 'sae-mixed');
}
}
}
});
};
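	// Fade out the affected row, then delegate to the stock TypedSection removal handler.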
s.handleRemove = function(section_id, ev) {
document.querySelector('.cbi-section-table-row[data-sid="%s"]'.format(section_id)).style.opacity = 0.5;
return form.TypedSection.prototype.handleRemove.apply(this, [section_id, ev]);
};
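	// Open the "Join Network" scan modal and start polling the selected radio for scan results.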
s.handleScan = function(radioDev, ev) {
var table = E('div', { 'class': 'table' }, [
E('div', { 'class': 'tr table-titles' }, [
E('div', { 'class': 'th col-2 middle center' }, _('Signal')),
E('div', { 'class': 'th col-4 middle left' }, _('SSID')),
E('div', { 'class': 'th col-2 middle center hide-xs' }, _('Channel')),
E('div', { 'class': 'th col-2 middle left hide-xs' }, _('Mode')),
E('div', { 'class': 'th col-3 middle left hide-xs' }, _('BSSID')),
E('div', { 'class': 'th col-3 middle left' }, _('Encryption')),
E('div', { 'class': 'th cbi-section-actions right' }, ' '),
])
]);
var stop = E('button', {
'class': 'btn',
'click': L.bind(this.handleScanStartStop, this),
'style': 'display:none',
'data-state': 'stop'
}, _('Stop refresh'));
cbi_update_table(table, [], E('em', { class: 'spinning' }, _('Starting wireless scan...')));
var md = ui.showModal(_('Join Network: Wireless Scan'), [
table,
E('div', { 'class': 'right' }, [
stop,
' ',
E('button', {
'class': 'btn',
'click': L.bind(this.handleScanAbort, this)
}, _('Dismiss'))
])
]);
md.style.maxWidth = '90%';
md.style.maxHeight = 'none';
this.pollFn = L.bind(this.handleScanRefresh, this, radioDev, {}, table, stop);
L.Poll.add(this.pollFn);
L.Poll.start();
};
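	// Poll callback: merge new scan results into the cache, sort by signal quality and redraw the result table.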
s.handleScanRefresh = function(radioDev, scanCache, table, stop) {
return radioDev.getScanList().then(L.bind(function(results) {
var rows = [];
for (var i = 0; i < results.length; i++)
scanCache[results[i].bssid] = results[i];
for (var k in scanCache)
if (scanCache[k].stale)
results.push(scanCache[k]);
results.sort(function(a, b) {
var diff = (b.quality - a.quality) || (a.channel - b.channel);
if (diff)
return diff;
if (a.ssid < b.ssid)
return -1;
else if (a.ssid > b.ssid)
return 1;
			if (a.bssid < b.bssid)
				return -1;
			else if (a.bssid > b.bssid)
				return 1;

			return 0;
		});
for (var i = 0; i < results.length; i++) {
var res = results[i],
qv = res.quality || 0,
qm = res.quality_max || 0,
q = (qv > 0 && qm > 0) ? Math.floor((100 / qm) * qv) : 0,
s = res.stale ? 'opacity:0.5' : '';
rows.push([
E('span', { 'style': s }, render_signal_badge(q, res.signal, res.noise)),
E('span', { 'style': s }, (res.ssid != null) ? '%h'.format(res.ssid) : E('em', _('hidden'))),
E('span', { 'style': s }, '%d'.format(res.channel)),
E('span', { 'style': s }, '%h'.format(res.mode)),
E('span', { 'style': s }, '%h'.format(res.bssid)),
E('span', { 'style': s }, '%h'.format(network.formatWifiEncryption(res.encryption))),
E('div', { 'class': 'right' }, E('button', {
'class': 'cbi-button cbi-button-action important',
'click': L.bind(this.handleJoin, this, radioDev, res)
}, _('Join Network')))
]);
res.stale = true;
}
cbi_update_table(table, rows);
stop.disabled = false;
stop.style.display = '';
stop.classList.remove('spinning');
}, this));
};
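	// Toggle scan result polling when the "Stop/Start refresh" button is pressed.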
s.handleScanStartStop = function(ev) {
var btn = ev.currentTarget;
if (btn.getAttribute('data-state') == 'stop') {
L.Poll.remove(this.pollFn);
btn.firstChild.data = _('Start refresh');
btn.setAttribute('data-state', 'start');
}
else {
L.Poll.add(this.pollFn);
btn.firstChild.data = _('Stop refresh');
btn.setAttribute('data-state', 'stop');
btn.classList.add('spinning');
btn.disabled = true;
}
};
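	// Dismiss the scan modal and unregister the polling function.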
s.handleScanAbort = function(ev) {
var md = L.dom.parent(ev.target, 'div[aria-modal="true"]');
if (md) {
md.style.maxWidth = '';
md.style.maxHeight = '';
}
ui.hideModal();
L.Poll.remove(this.pollFn);
this.pollFn = null;
};
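	// Apply the join form: optionally replace existing networks, create the wifi-iface section with a suitable
	// encryption setting, then create the target network and attach it to the chosen firewall zone.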
s.handleJoinConfirm = function(radioDev, bss, form, ev) {
var nameopt = L.toArray(form.lookupOption('name', '_new_'))[0],
passopt = L.toArray(form.lookupOption('password', '_new_'))[0],
bssidopt = L.toArray(form.lookupOption('bssid', '_new_'))[0],
zoneopt = L.toArray(form.lookupOption('zone', '_new_'))[0],
replopt = L.toArray(form.lookupOption('replace', '_new_'))[0],
nameval = (nameopt && nameopt.isValid('_new_')) ? nameopt.formvalue('_new_') : null,
passval = (passopt && passopt.isValid('_new_')) ? passopt.formvalue('_new_') : null,
bssidval = (bssidopt && bssidopt.isValid('_new_')) ? bssidopt.formvalue('_new_') : null,
zoneval = zoneopt ? zoneopt.formvalue('_new_') : null,
enc = L.isObject(bss.encryption) ? bss.encryption : null,
is_wep = (enc && Array.isArray(enc.wep)),
		    is_psk = (enc && Array.isArray(enc.wpa) && L.toArray(enc.authentication).filter(function(a) { return a == 'psk' }).length > 0),
		    is_sae = (enc && Array.isArray(enc.wpa) && L.toArray(enc.authentication).filter(function(a) { return a == 'sae' }).length > 0);
if (nameval == null || (passopt && passval == null))
return;
var section_id = null;
return this.map.save(function() {
var wifi_sections = uci.sections('wireless', 'wifi-iface');
if (replopt.formvalue('_new_') == '1') {
for (var i = 0; i < wifi_sections.length; i++)
if (wifi_sections[i].device == radioDev.getName())
uci.remove('wireless', wifi_sections[i]['.name']);
}
if (uci.get('wireless', radioDev.getName(), 'disabled') == '1') {
for (var i = 0; i < wifi_sections.length; i++)
if (wifi_sections[i].device == radioDev.getName())
uci.set('wireless', wifi_sections[i]['.name'], 'disabled', '1');
uci.unset('wireless', radioDev.getName(), 'disabled');
}
section_id = next_free_sid(wifi_sections.length);
uci.add('wireless', 'wifi-iface', section_id);
uci.set('wireless', section_id, 'device', radioDev.getName());
uci.set('wireless', section_id, 'mode', (bss.mode == 'Ad-Hoc') ? 'adhoc' : 'sta');
uci.set('wireless', section_id, 'network', nameval);
if (bss.ssid != null) {
uci.set('wireless', section_id, 'ssid', bss.ssid);
if (bssidval == '1')
uci.set('wireless', section_id, 'bssid', bss.bssid);
}
else if (bss.bssid != null) {
uci.set('wireless', section_id, 'bssid', bss.bssid);
}
if (is_sae) {
uci.set('wireless', section_id, 'encryption', 'sae');
uci.set('wireless', section_id, 'key', passval);
}
else if (is_psk) {
for (var i = enc.wpa.length - 1; i >= 0; i--) {
if (enc.wpa[i] == 2) {
uci.set('wireless', section_id, 'encryption', 'psk2');
break;
}
else if (enc.wpa[i] == 1) {
uci.set('wireless', section_id, 'encryption', 'psk');
break;
}
}
uci.set('wireless', section_id, 'key', passval);
}
else if (is_wep) {
uci.set('wireless', section_id, 'encryption', 'wep-open');
uci.set('wireless', section_id, 'key', '1');
uci.set('wireless', section_id, 'key1', passval);
}
else {
uci.set('wireless', section_id, 'encryption', 'none');
}
return network.addNetwork(nameval, { proto: 'dhcp' }).then(function(net) {
firewall.deleteNetwork(net.getName());
var zonePromise = zoneval
? firewall.getZone(zoneval).then(function(zone) { return zone || firewall.addZone(zoneval) })
: Promise.resolve();
return zonePromise.then(function(zone) {
if (zone)
zone.addNetwork(net.getName());
});
});
}).then(L.bind(function() {
return this.renderMoreOptionsModal(section_id);
}, this));
};
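	// Build and display the "Join Network" form for the selected scan result.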
s.handleJoin = function(radioDev, bss, ev) {
this.handleScanAbort(ev);
var m2 = new form.Map('wireless'),
s2 = m2.section(form.NamedSection, '_new_'),
enc = L.isObject(bss.encryption) ? bss.encryption : null,
is_wep = (enc && Array.isArray(enc.wep)),
	    is_psk = (enc && Array.isArray(enc.wpa) && L.toArray(enc.authentication).filter(function(a) { return a == 'psk' || a == 'sae' }).length > 0),
replace, passphrase, name, bssid, zone;
var nameUsed = function(name) {
var s = uci.get('network', name);
if (s != null && s['.type'] != 'interface')
return true;
var net = (s != null) ? network.instantiateNetwork(name) : null;
return (net != null && !net.isEmpty());
};
s2.render = function() {
return Promise.all([
{},
this.renderUCISection('_new_')
]).then(this.renderContents.bind(this));
};
replace = s2.option(form.Flag, 'replace', _('Replace wireless configuration'), _('Check this option to delete the existing networks from this radio.'));
name = s2.option(form.Value, 'name', _('Name of the new network'), _('The allowed characters are: <code>A-Z</code>, <code>a-z</code>, <code>0-9</code> and <code>_</code>'));
name.datatype = 'uciname';
name.default = 'wwan';
name.rmempty = false;
name.validate = function(section_id, value) {
if (nameUsed(value))
return _('The network name is already used');
return true;
};
for (var i = 2; nameUsed(name.default); i++)
name.default = 'wwan%d'.format(i);
if (is_wep || is_psk) {
passphrase = s2.option(form.Value, 'password', is_wep ? _('WEP passphrase') : _('WPA passphrase'), _('Specify the secret encryption key here.'));
passphrase.datatype = is_wep ? 'wepkey' : 'wpakey';
passphrase.password = true;
passphrase.rmempty = false;
}
if (bss.ssid != null) {
bssid = s2.option(form.Flag, 'bssid', _('Lock to BSSID'), _('Instead of joining any network with a matching SSID, only connect to the BSSID <code>%h</code>.').format(bss.bssid));
bssid.default = '0';
}
zone = s2.option(widgets.ZoneSelect, 'zone', _('Create / Assign firewall-zone'), _('Choose the firewall zone you want to assign to this interface. Select <em>unspecified</em> to remove the interface from the associated zone or fill out the <em>create</em> field to define a new zone and attach the interface to it.'));
zone.default = 'wan';
return m2.render().then(L.bind(function(nodes) {
ui.showModal(_('Joining Network: %q').replace(/%q/, '"%h"'.format(bss.ssid)), [
nodes,
E('div', { 'class': 'right' }, [
E('button', {
'class': 'btn',
'click': ui.hideModal
}, _('Cancel')), ' ',
E('button', {
'class': 'cbi-button cbi-button-positive important',
'click': ui.createHandlerFn(this, 'handleJoinConfirm', radioDev, bss, m2)
}, _('Submit'))
])
], 'cbi-modal').querySelector('[id="%s"] input[class][type]'.format((passphrase || name).cbid('_new_'))).focus();
}, this));
};
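	// Create a new access point section on the selected radio and open its edit modal.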
s.handleAdd = function(radioDev, ev) {
var section_id = next_free_sid(uci.sections('wireless', 'wifi-iface').length);
uci.unset('wireless', radioDev.getName(), 'disabled');
uci.add('wireless', 'wifi-iface', section_id);
uci.set('wireless', section_id, 'device', radioDev.getName());
uci.set('wireless', section_id, 'mode', 'ap');
uci.set('wireless', section_id, 'ssid', 'OpenWrt');
uci.set('wireless', section_id, 'encryption', 'none');
this.addedSection = section_id;
return this.renderMoreOptionsModal(section_id);
};
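	// Table cells: a badge column showing the radio or network state and a status column with live details.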
o = s.option(form.DummyValue, '_badge');
o.modalonly = false;
o.textvalue = function(section_id) {
var inst = this.section.lookupRadioOrNetwork(section_id),
node = E('div', { 'class': 'center' });
if (inst.getWifiNetworks)
node.appendChild(render_radio_badge(inst));
else
node.appendChild(render_network_badge(inst));
return node;
};
o = s.option(form.DummyValue, '_stat');
o.modalonly = false;
o.textvalue = function(section_id) {
var inst = this.section.lookupRadioOrNetwork(section_id);
if (inst.getWifiNetworks)
return render_radio_status(inst, this.section.wifis.filter(function(e) {
return (e.getWifiDeviceName() == inst.getName());
}));
else
return render_network_status(inst);
};
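	// Render the configuration form and register a poller that restarts flagged interfaces and refreshes
	// host hints, radio, network and association data every five seconds.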
return m.render().then(L.bind(function(m, nodes) {
L.Poll.add(L.bind(function() {
var section_ids = m.children[0].cfgsections(),
tasks = [ network.getHostHints(), network.getWifiDevices() ];
for (var i = 0; i < section_ids.length; i++) {
var row = nodes.querySelector('.cbi-section-table-row[data-sid="%s"]'.format(section_ids[i])),
dsc = row.querySelector('[data-name="_stat"] > div'),
btns = row.querySelectorAll('.cbi-section-actions button');
if (dsc.getAttribute('restart') == '') {
dsc.setAttribute('restart', '1');
tasks.push(fs.exec('/sbin/wifi', ['up', section_ids[i]]).catch(function(e) {
ui.addNotification(null, E('p', e.message));
}));
}
else if (dsc.getAttribute('restart') == '1') {
dsc.removeAttribute('restart');
btns[0].classList.remove('spinning');
btns[0].disabled = false;
}
}
return Promise.all(tasks)
.then(L.bind(function(hosts_radios) {
var tasks = [];
for (var i = 0; i < hosts_radios[1].length; i++)
tasks.push(hosts_radios[1][i].getWifiNetworks());
return Promise.all(tasks).then(function(data) {
hosts_radios[2] = [];
for (var i = 0; i < data.length; i++)
hosts_radios[2].push.apply(hosts_radios[2], data[i]);
return hosts_radios;
});
}, network))
.then(L.bind(function(hosts_radios_wifis) {
var tasks = [];
for (var i = 0; i < hosts_radios_wifis[2].length; i++)
tasks.push(hosts_radios_wifis[2][i].getAssocList());
return Promise.all(tasks).then(function(data) {
hosts_radios_wifis[3] = [];
for (var i = 0; i < data.length; i++) {
var wifiNetwork = hosts_radios_wifis[2][i],
radioDev = hosts_radios_wifis[1].filter(function(d) { return d.getName() == wifiNetwork.getWifiDeviceName() })[0];
for (var j = 0; j < data[i].length; j++)
hosts_radios_wifis[3].push(Object.assign({ radio: radioDev, network: wifiNetwork }, data[i][j]));
}
return hosts_radios_wifis;
});
}, network))
.then(L.bind(this.poll_status, this, nodes));
}, this), 5);
var table = E('div', { 'class': 'table assoclist', 'id': 'wifi_assoclist_table' }, [
E('div', { 'class': 'tr table-titles' }, [
E('div', { 'class': 'th nowrap' }, _('Network')),
E('div', { 'class': 'th hide-xs' }, _('MAC-Address')),
E('div', { 'class': 'th' }, _('Host')),
E('div', { 'class': 'th' }, _('Signal / Noise')),
E('div', { 'class': 'th' }, _('RX Rate / TX Rate'))
])
]);
		cbi_update_table(table, [], E('em', { 'class': 'spinning' }, _('Collecting data...')));
return E([ nodes, E('h3', _('Associated Stations')), table ]);
}, this, m));
}
});
| luci-mod-network: add WPA3-EAP and mixed WPA2/WPA3-EAP settings
Fixes: #3363
Signed-off-by: Jo-Philipp Wich <[email protected]>
| modules/luci-mod-network/htdocs/luci-static/resources/view/network/wireless.js | luci-mod-network: add WPA3-EAP and mixed WPA2/WPA3-EAP settings | <ide><path>odules/luci-mod-network/htdocs/luci-static/resources/view/network/wireless.js
<ide> sid = 'wifinet' + (++offset);
<ide>
<ide> return sid;
<add>}
<add>
<add>function add_dependency_permutations(o, deps) {
<add> var res = null;
<add>
<add> for (var key in deps) {
<add> if (!deps.hasOwnProperty(key) || !Array.isArray(deps[key]))
<add> continue;
<add>
<add> var list = deps[key],
<add> tmp = [];
<add>
<add> for (var j = 0; j < list.length; j++) {
<add> for (var k = 0; k < (res ? res.length : 1); k++) {
<add> var item = (res ? Object.assign({}, res[k]) : {});
<add> item[key] = list[j];
<add> tmp.push(item);
<add> }
<add> }
<add>
<add> res = tmp;
<add> }
<add>
<add> for (var i = 0; i < (res ? res.length : 0); i++)
<add> o.depends(res[i]);
<ide> }
<ide>
<ide> var CBIWifiFrequencyValue = form.Value.extend({
<ide> var e = this.section.children.filter(function(o) { return o.option == 'encryption' })[0].formvalue(section_id),
<ide> co = this.section.children.filter(function(o) { return o.option == 'cipher' })[0], c = co.formvalue(section_id);
<ide>
<del> if (value == 'wpa' || value == 'wpa2')
<add> if (value == 'wpa' || value == 'wpa2' || value == 'wpa3' || value == 'wpa3-mixed')
<ide> uci.unset('wireless', section_id, 'key');
<ide>
<ide> if (co.isActive(section_id) && e && (c == 'tkip' || c == 'ccmp' || c == 'tkip+ccmp'))
<ide> o = ss.taboption('encryption', form.ListValue, 'cipher', _('Cipher'));
<ide> o.depends('encryption', 'wpa');
<ide> o.depends('encryption', 'wpa2');
<add> o.depends('encryption', 'wpa3');
<add> o.depends('encryption', 'wpa3-mixed');
<ide> o.depends('encryption', 'psk');
<ide> o.depends('encryption', 'psk2');
<ide> o.depends('encryption', 'wpa-mixed');
<ide> var has_ap_owe = L.hasSystemFeature('hostapd', 'owe'),
<ide> has_sta_owe = L.hasSystemFeature('wpasupplicant', 'owe');
<ide>
<add> // Probe Suite-B support
<add> var has_ap_eap192 = L.hasSystemFeature('hostapd', 'suiteb192'),
<add> has_sta_eap192 = L.hasSystemFeature('wpasupplicant', 'suiteb192');
<add>
<ide>
<ide> if (has_hostapd || has_supplicant) {
<del> crypto_modes.push(['psk2', 'WPA2-PSK', 33]);
<add> crypto_modes.push(['psk2', 'WPA2-PSK', 35]);
<ide> crypto_modes.push(['psk-mixed', 'WPA-PSK/WPA2-PSK Mixed Mode', 22]);
<ide> crypto_modes.push(['psk', 'WPA-PSK', 21]);
<ide> }
<ide> }
<ide>
<ide> if (has_ap_eap || has_sta_eap) {
<del> crypto_modes.push(['wpa2', 'WPA2-EAP', 32]);
<add> if (has_ap_eap192 || has_sta_eap192) {
<add> crypto_modes.push(['wpa3', 'WPA3-EAP', 33]);
<add> crypto_modes.push(['wpa3-mixed', 'WPA2-EAP/WPA3-EAP Mixed Mode', 32]);
<add> }
<add>
<add> crypto_modes.push(['wpa2', 'WPA2-EAP', 34]);
<ide> crypto_modes.push(['wpa', 'WPA-EAP', 20]);
<ide> }
<ide>
<ide> 'sae-mixed': has_ap_sae || _('Requires hostapd with SAE support'),
<ide> 'wpa': has_ap_eap || _('Requires hostapd with EAP support'),
<ide> 'wpa2': has_ap_eap || _('Requires hostapd with EAP support'),
<add> 'wpa3': has_ap_eap192 || _('Requires hostapd with EAP Suite-B support'),
<add> 'wpa3-mixed': has_ap_eap192 || _('Requires hostapd with EAP Suite-B support'),
<ide> 'owe': has_ap_owe || _('Requires hostapd with OWE support')
<ide> },
<ide> 'sta': {
<ide> 'sae-mixed': has_sta_sae || _('Requires wpa-supplicant with SAE support'),
<ide> 'wpa': has_sta_eap || _('Requires wpa-supplicant with EAP support'),
<ide> 'wpa2': has_sta_eap || _('Requires wpa-supplicant with EAP support'),
<add> 'wpa3': has_sta_eap192 || _('Requires wpa-supplicant with EAP Suite-B support'),
<add> 'wpa3-mixed': has_sta_eap192 || _('Requires wpa-supplicant with EAP Suite-B support'),
<ide> 'owe': has_sta_owe || _('Requires wpa-supplicant with OWE support')
<ide> },
<ide> 'adhoc': {
<ide>
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'auth_server', _('Radius-Authentication-Server'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.datatype = 'host(0)';
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'auth_port', _('Radius-Authentication-Port'), _('Default %d').format(1812));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.datatype = 'port';
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'auth_secret', _('Radius-Authentication-Secret'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.password = true;
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'acct_server', _('Radius-Accounting-Server'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.datatype = 'host(0)';
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'acct_port', _('Radius-Accounting-Port'), _('Default %d').format(1813));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.datatype = 'port';
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'acct_secret', _('Radius-Accounting-Secret'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.password = true;
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'dae_client', _('DAE-Client'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.datatype = 'host(0)';
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'dae_port', _('DAE-Port'), _('Default %d').format(3799));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.datatype = 'port';
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'dae_secret', _('DAE-Secret'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.rmempty = true;
<ide> o.password = true;
<ide>
<ide> var has_80211r = L.hasSystemFeature('hostapd', '11r') || L.hasSystemFeature('hostapd', 'eap');
<ide>
<ide> o = ss.taboption('encryption', form.Flag, 'ieee80211r', _('802.11r Fast Transition'), _('Enables fast roaming among access points that belong to the same Mobility Domain'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<del> if (has_80211r) {
<del> o.depends({ mode: 'ap', encryption: 'psk' });
<del> o.depends({ mode: 'ap', encryption: 'psk2' });
<del> o.depends({ mode: 'ap', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'ap', encryption: 'sae' });
<del> o.depends({ mode: 'ap', encryption: 'sae-mixed' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'ap-wds', encryption: 'sae' });
<del> o.depends({ mode: 'ap-wds', encryption: 'sae-mixed' });
<del> }
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<add> if (has_80211r)
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['psk', 'psk2', 'psk-mixed', 'sae', 'sae-mixed'] });
<ide> o.rmempty = true;
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'nasid', _('NAS ID'), _('Used for two different purposes: RADIUS NAS ID and 802.11r R0KH-ID. Not needed with normal WPA(2)-PSK.'));
<del> o.depends({ mode: 'ap', encryption: 'wpa' });
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.depends({ ieee80211r: '1' });
<ide> o.rmempty = true;
<ide>
<ide> o.value('ttls', 'TTLS');
<ide> o.value('peap', 'PEAP');
<ide> o.value('fast', 'FAST');
<del> o.depends({ mode: 'sta', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide>
<ide> o = ss.taboption('encryption', form.Flag, 'ca_cert_usesystem', _('Use system certificates'), _("Validate server certificate using built-in system CA bundle,<br />requires the \"ca-bundle\" package"));
<ide> o.enabled = '1';
<ide> o.disabled = '0';
<ide> o.default = o.disabled;
<del> o.depends({ mode: 'sta', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide> o.validate = function(section_id, value) {
<ide> if (value == '1' && !L.hasSystemFeature('cabundle')) {
<ide> return _("This option cannot be used because the ca-bundle package is not installed.");
<ide> };
<ide>
<ide> o = ss.taboption('encryption', form.FileUpload, 'ca_cert', _('Path to CA-Certificate'));
<del> o.depends({ mode: 'sta', encryption: 'wpa', ca_cert_usesystem: '0' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2', ca_cert_usesystem: '0' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa', ca_cert_usesystem: '0' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2', ca_cert_usesystem: '0' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], ca_cert_usesystem: ['0'] });
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'subject_match', _('Certificate constraint (Subject)'), _("Certificate constraint substring - e.g. /CN=wifi.mycompany.com<br />See `logread -f` during handshake for actual values"));
<del> o.depends({ mode: 'sta', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide>
<ide> o = ss.taboption('encryption', form.DynamicList, 'altsubject_match', _('Certificate constraint (SAN)'), _("Certificate constraint(s) via Subject Alternate Name values<br />(supported attributes: EMAIL, DNS, URI) - e.g. DNS:wifi.mycompany.com"));
<del> o.depends({ mode: 'sta', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide>
<ide> o = ss.taboption('encryption', form.DynamicList, 'domain_match', _('Certificate constraint (Domain)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (exact match)"));
<del> o.depends({ mode: 'sta', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide>
<ide> o = ss.taboption('encryption', form.DynamicList, 'domain_suffix_match', _('Certificate constraint (Wildcard)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (suffix match)"));
<del> o.depends({ mode: 'sta', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide>
<ide> o = ss.taboption('encryption', form.FileUpload, 'client_cert', _('Path to Client-Certificate'));
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['tls'] });
<ide>
<ide> o = ss.taboption('encryption', form.FileUpload, 'priv_key', _('Path to Private Key'));
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['tls'] });
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'priv_key_pwd', _('Password of Private Key'));
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['tls'] });
<ide> o.password = true;
<ide>
<ide> o = ss.taboption('encryption', form.ListValue, 'auth', _('Authentication'));
<ide> o.value('CHAP', 'CHAP');
<ide> o.value('MSCHAP', 'MSCHAP');
<ide> o.value('MSCHAPV2', 'MSCHAPv2');
<del> o.value('EAP-GTC');
<del> o.value('EAP-MD5');
<del> o.value('EAP-MSCHAPV2');
<del> o.value('EAP-TLS');
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
<add> o.value('EAP-GTC', 'EAP-GTC');
<add> o.value('EAP-MD5', 'EAP-MD5');
<add> o.value('EAP-MSCHAPV2', 'EAP-MSCHAPv2');
<add> o.value('EAP-TLS', 'EAP-TLS');
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'ttls'] });
<ide>
<ide> o.validate = function(section_id, value) {
<ide> var eo = this.section.children.filter(function(o) { return o.option == 'eap_type' })[0],
<ide> o.enabled = '1';
<ide> o.disabled = '0';
<ide> o.default = o.disabled;
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide> o.validate = function(section_id, value) {
<ide> if (value == '1' && !L.hasSystemFeature('cabundle')) {
<ide> return _("This option cannot be used because the ca-bundle package is not installed.");
<ide> };
<ide>
<ide> o = ss.taboption('encryption', form.FileUpload, 'ca_cert2', _('Path to inner CA-Certificate'));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa', ca_cert2_usesystem: '0' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2', ca_cert2_usesystem: '0' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa', ca_cert2_usesystem: '0' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2', ca_cert2_usesystem: '0' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'], ca_cert2_usesystem: ['0'] });
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'subject_match2', _('Inner certificate constraint (Subject)'), _("Certificate constraint substring - e.g. /CN=wifi.mycompany.com<br />See `logread -f` during handshake for actual values"));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide>
<ide> o = ss.taboption('encryption', form.DynamicList, 'altsubject_match2', _('Inner certificate constraint (SAN)'), _("Certificate constraint(s) via Subject Alternate Name values<br />(supported attributes: EMAIL, DNS, URI) - e.g. DNS:wifi.mycompany.com"));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide>
<ide> o = ss.taboption('encryption', form.DynamicList, 'domain_match2', _('Inner certificate constraint (Domain)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (exact match)"));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide>
<ide> o = ss.taboption('encryption', form.DynamicList, 'domain_suffix_match2', _('Inner certificate constraint (Wildcard)'), _("Certificate constraint(s) against DNS SAN values (if available)<br />or Subject CN (suffix match)"));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide>
<ide> o = ss.taboption('encryption', form.FileUpload, 'client_cert2', _('Path to inner Client-Certificate'));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide>
<ide> o = ss.taboption('encryption', form.FileUpload, 'priv_key2', _('Path to inner Private Key'));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'priv_key2_pwd', _('Password of inner Private Key'));
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', auth: 'EAP-TLS', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', auth: 'EAP-TLS', encryption: 'wpa2' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], auth: ['EAP-TLS'] });
<ide> o.password = true;
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'identity', _('Identity'));
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'tls', 'ttls'] });
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'anonymous_identity', _('Anonymous Identity'));
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'tls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'tls', encryption: 'wpa' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'tls', 'ttls'] });
<ide>
<ide> o = ss.taboption('encryption', form.Value, 'password', _('Password'));
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', eap_type: 'ttls', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'fast', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'peap', encryption: 'wpa' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', eap_type: 'ttls', encryption: 'wpa' });
<add> add_dependency_permutations(o, { mode: ['sta', 'sta-wds'], encryption: ['wpa', 'wpa2', 'wpa3', 'wpa3-mixed'], eap_type: ['fast', 'peap', 'ttls'] });
<ide> o.password = true;
<ide>
<ide>
<ide> o.value('', _('Disabled'));
<ide> o.value('1', _('Optional'));
<ide> o.value('2', _('Required'));
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap', encryption: 'psk2' });
<del> o.depends({ mode: 'ap', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'ap', encryption: 'sae' });
<del> o.depends({ mode: 'ap', encryption: 'sae-mixed' });
<del> o.depends({ mode: 'ap', encryption: 'owe' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'ap-wds', encryption: 'sae' });
<del> o.depends({ mode: 'ap-wds', encryption: 'sae-mixed' });
<del> o.depends({ mode: 'ap-wds', encryption: 'owe' });
<del> o.depends({ mode: 'sta', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'wpa2' });
<del> o.depends({ mode: 'sta', encryption: 'psk2' });
<del> o.depends({ mode: 'sta', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'sta', encryption: 'sae' });
<del> o.depends({ mode: 'sta', encryption: 'sae-mixed' });
<del> o.depends({ mode: 'sta', encryption: 'owe' });
<del> o.depends({ mode: 'sta-wds', encryption: 'psk2' });
<del> o.depends({ mode: 'sta-wds', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'sta-wds', encryption: 'sae' });
<del> o.depends({ mode: 'sta-wds', encryption: 'sae-mixed' });
<del> o.depends({ mode: 'sta-wds', encryption: 'owe' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds', 'sta', 'sta-wds'], encryption: ['owe', 'psk2', 'psk-mixed', 'sae', 'sae-mixed', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<add>
<ide> o.defaults = {
<del> '2': [{ encryption: 'sae' }, { encryption: 'owe' }],
<add> '2': [{ encryption: 'sae' }, { encryption: 'owe' }, { encryption: 'wpa3' }, { encryption: 'wpa3-mixed' }],
<ide> '1': [{ encryption: 'sae-mixed'}],
<ide> '': []
<ide> };
<ide> };
<ide>
<ide> o = ss.taboption('encryption', form.Flag, 'wpa_disable_eapol_key_retries', _('Enable key reinstallation (KRACK) countermeasures'), _('Complicates key reinstallation attacks on the client side by disabling retransmission of EAPOL-Key frames that are used to install keys. This workaround might cause interoperability issues and reduced robustness of key negotiation especially in environments with heavy traffic load.'));
<del> o.depends({ mode: 'ap', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap', encryption: 'psk2' });
<del> o.depends({ mode: 'ap', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'ap', encryption: 'sae' });
<del> o.depends({ mode: 'ap', encryption: 'sae-mixed' });
<del> o.depends({ mode: 'ap-wds', encryption: 'wpa2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk2' });
<del> o.depends({ mode: 'ap-wds', encryption: 'psk-mixed' });
<del> o.depends({ mode: 'ap-wds', encryption: 'sae' });
<del> o.depends({ mode: 'ap-wds', encryption: 'sae-mixed' });
<add> add_dependency_permutations(o, { mode: ['ap', 'ap-wds'], encryption: ['psk2', 'psk-mixed', 'sae', 'sae-mixed', 'wpa2', 'wpa3', 'wpa3-mixed'] });
<ide>
<ide> if (L.hasSystemFeature('hostapd', 'cli') && L.hasSystemFeature('wpasupplicant')) {
<ide> o = ss.taboption('encryption', form.Flag, 'wps_pushbutton', _('Enable WPS pushbutton, requires WPA(2)-PSK/WPA3-SAE')) |
|
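The hunk above replaces long runs of individual o.depends({ ... }) calls with a single add_dependency_permutations(o, { ... }) helper that expands every combination of the listed values into one dependency object each. The following self-contained sketch reconstructs that expansion for illustration only: the function header and the initial value of res are inferred from the visible loop body, and the stub option object merely collects what a real LuCI form option would receive.

// Sketch of the permutation helper introduced by the diff above.
// "deps" maps option names to arrays of values, e.g.
// { mode: ['ap', 'ap-wds'], encryption: ['wpa', 'wpa2'] }.
function add_dependency_permutations(o, deps) {
	var res = null;

	for (var key in deps) {
		if (!deps.hasOwnProperty(key) || !Array.isArray(deps[key]))
			continue;

		var list = deps[key],
		    tmp = [];

		// Cross every value of this key with every combination built so far.
		for (var j = 0; j < list.length; j++) {
			for (var k = 0; k < (res ? res.length : 1); k++) {
				var item = (res ? Object.assign({}, res[k]) : {});
				item[key] = list[j];
				tmp.push(item);
			}
		}

		res = tmp;
	}

	// Register one dependency per combination on the form option.
	for (var i = 0; i < (res ? res.length : 0); i++)
		o.depends(res[i]);
}

// Stub standing in for a LuCI form option, to show the effect.
var collected = [];
var fakeOption = { depends: function(d) { collected.push(d); } };

add_dependency_permutations(fakeOption, {
	mode: ['ap', 'ap-wds'],
	encryption: ['wpa', 'wpa2']
});

console.log(collected.length); // 4 combinations registered

Generating the cartesian product in one place keeps the dependency lists consistent when a new encryption mode such as wpa3 is added, instead of editing a dozen depends() lines per option.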
JavaScript | agpl-3.0 | 0d53c46f613907118fd376df9ce411522491c367 | 0 | yashodhank/xibo-cms,dasgarner/xibo-cms,alexhuang888/xibo-cms,xibosignage/xibo-cms,PeterMis/xibo-cms,PeterMis/xibo-cms,guruevi/xibo-cms,xibosignage/xibo-cms,alexharrington/xibo-cms,PeterMis/xibo-cms,PeterMis/xibo-cms,ajiwo/xibo-cms,xibosignage/xibo-cms,yashodhank/xibo-cms,dasgarner/xibo-cms,alexharrington/xibo-cms,ajiwo/xibo-cms,guruevi/xibo-cms,ajiwo/xibo-cms,alexharrington/xibo-cms,alexharrington/xibo-cms,xibosignage/xibo-cms,xibosignage/xibo-cms,PeterMis/xibo-cms,dasgarner/xibo-cms,guruevi/xibo-cms,xibosignage/xibo-cms,dasgarner/xibo-cms,yashodhank/xibo-cms,alexharrington/xibo-cms,alexhuang888/xibo-cms,dasgarner/xibo-cms,alexhuang888/xibo-cms,dasgarner/xibo-cms | /**
* Xibo - Digital Signage - http://www.xibo.org.uk
* Copyright (C) 2006-2015 Daniel Garner
*
* This file is part of Xibo.
*
* Xibo is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* Xibo is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Xibo. If not, see <http://www.gnu.org/licenses/>.
*/
$(document).ready(function(){
// Set the height of the grid to be something sensible for the current screen resolution
$("#layoutJumpList").change(function(){
window.location = $(this).val();
}).selectpicker();
$("#layout").each(function() {
// Only enable drag / drop if we are within a certain limit
if ($(this).attr("designer_scale") > 0.41) {
$(this).find(".region")
.draggable({
containment: this,
stop: regionPositionUpdate,
drag: updateRegionInfo
})
.resizable({
containment: this,
minWidth: 25,
minHeight: 25,
stop: regionPositionUpdate,
resize: updateRegionInfo
});
}
$(this).find(".region")
.hover(function() {
$(this).find(".regionInfo").show();
$(this).find(".previewNav").show();
},
function() {
$(this).find(".regionInfo").hide();
$(this).find(".previewNav").hide();
});
// Preview
$('.regionPreview', this).each(function(){
new Preview(this);
});
// Set an interval
XiboPing($(this).data('statusUrl'), '.layout-status');
setInterval("XiboPing('" + $(this).data('statusUrl') + "', '.layout-status')", 1000 * 60); // Every minute
});
$('.RegionOptionsMenuItem').click(function(e) {
e.preventDefault();
// If any regions have been moved, then save them.
if ($("#layout-save-all").length > 0) {
SystemMessage(translation.savePositionsFirst, true);
return;
}
var data = {
layoutid: $(this).closest('.region').attr("layoutid"),
regionid: $(this).closest('.region').attr("regionid"),
scale: $(this).closest('.region').attr("tip_scale"),
zoom: $(this).closest('.layout').attr("zoom")
};
var url = $(this).prop("href");
XiboFormRender(url, data);
});
setTimeout(function() {
$(".region .regionInfo").hide("200");
$(".region .previewNav").hide("200");
}, 500);
});
/**
* Update Region Information with Latest Width/Position
* @param {[type]} e [description]
* @param {[type]} ui [description]
* @return {[type]} [description]
*/
function updateRegionInfo(e, ui) {
var pos = $(this).position();
var scale = ($(this).closest('.layout').attr("version") == 1) ? (1 / $(this).attr("tip_scale")) : $(this).attr("designer_scale");
$('.region-tip', this).html(Math.round($(this).width() / scale, 0) + " x " + Math.round($(this).height() / scale, 0) + " (" + Math.round(pos.left / scale, 0) + "," + Math.round(pos.top / scale, 0) + ")");
}
function regionPositionUpdate(e, ui) {
var width = $(this).css("width");
var height = $(this).css("height");
var regionid = $(this).attr("regionid");
// Update the region width / height attributes
$(this).attr("width", width).attr("height", height);
// Update the Preview for the new sizing
var preview = Preview.instances[regionid];
preview.SetSequence(preview.seq);
// Expose a new button to save the positions
if ($("#layout-save-all").length <= 0) {
$("<button/>", {
"class": "btn",
id: "layout-save-all",
html: translation.save_position_button
})
.click(function() {
// Save positions for all layouts / regions
savePositions();
return false;
})
.appendTo(".layout-meta");
$("<button/>", {
"class": "btn",
id: "layout-revert",
html: translation.revert_position_button
})
.click(function() {
// Reload
location.reload();
return false;
})
.appendTo(".layout-meta");
}
}
function savePositions() {
// Ditch the button
$("#layout-save-all").remove();
$("#layout-revert").remove();
// Update all layouts
$("#layout").each(function(){
// Store the Layout ID
var url = $(this).data().positionAllUrl;
// Build an array of
var regions = new Array();
$(this).find(".region").each(function(){
var designer_scale = $(this).attr("designer_scale");
var position = $(this).position();
var region = {
width: $(this).width() / designer_scale,
height: $(this).height() / designer_scale,
top: position.top / designer_scale,
left: position.left / designer_scale,
regionid: $(this).attr("regionid")
};
// Update the region width / height attributes
$(this).attr("width", region.width).attr("height", region.height);
// Add to the array
regions.push(region);
});
$.ajax({
type: "put",
url: url,
cache: false,
dataType: "json",
data: {regions : JSON.stringify(regions) },
success: XiboSubmitResponse
});
});
}
/**
* Sets the layout to full screen
*/
function setFullScreenLayout(width, height) {
$('#width', '.XiboForm').val(width);
$('#height', '.XiboForm').val(height);
$('#top', '.XiboForm').val('0');
$('#left', '.XiboForm').val('0');
}
function refreshPreview(regionId) {
// Refresh the preview
var preview = Preview.instances[regionId];
preview.SetSequence(preview.seq);
}
var loadTimeLineCallback = function(dialog) {
dialog.addClass("modal-big");
refreshPreview($('#timelineControl').attr('regionid'));
$("li.timelineMediaListItem").hover(function() {
var position = $(this).position();
var scale = $('#layout').attr('designer_scale');
// Change the hidden div's content
$("div#timelinePreview")
.html($("div.timelineMediaPreview", this).html())
.css({
"margin-top": position.top + $('#timelineControl').closest('.modal-body').scrollTop()
})
.show();
$("#timelinePreview .hoverPreview").css({
width: $("div#timelinePreview").width() / scale,
transform: "scale(" + scale + ")",
"transform-origin": "0 0 ",
background: $('#layout').css('background-color')
})
}, function() {
return false;
});
$(".timelineSortableListOfMedia").sortable();
// Hook up the library Upload Buttons
$(".libraryUploadForm").click(libraryUploadClick);
};
var XiboTimelineSaveOrder = function(timelineDiv) {
var url = $("#" + timelineDiv).data().orderUrl;
var i = 0;
var widgets = {};
$('#' + timelineDiv + ' li.timelineMediaListItem').each(function() {
i++;
widgets[$(this).attr("widgetid")] = i;
});
console.log(widgets);
// Call the server to do the reorder
$.ajax({
type:"post",
url: url,
cache:false,
dataType:"json",
data:{
"widgets": widgets
},
success: [
XiboSubmitResponse,
afterTimeLineSaveOrder
]
});
};
var afterTimeLineSaveOrder = function() {
$('.regionPreview').each(function(idx, el) {
refreshPreview($(el).attr("regionid"));
});
};
var LibraryAssignSubmit = function() {
// Collect our media
var media = [];
$("#LibraryAssignSortable > li").each(function() {
media.push($(this).data().mediaId);
});
assignMediaToPlaylist($("#LibraryAssign").data().url, media);
};
var assignMediaToPlaylist = function(url, media) {
toastr.info(media, "Assign Media to Playlist");
$.ajax({
type: "post",
url: url,
cache: false,
dataType: "json",
data: {media: media},
success: XiboSubmitResponse
});
};
| web/theme/default/js/xibo-layout-designer.js | /**
* Xibo - Digital Signage - http://www.xibo.org.uk
* Copyright (C) 2006-2015 Daniel Garner
*
* This file is part of Xibo.
*
* Xibo is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* Xibo is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Xibo. If not, see <http://www.gnu.org/licenses/>.
*/
$(document).ready(function(){
// Set the height of the grid to be something sensible for the current screen resolution
$("#layoutJumpList").change(function(){
window.location = $(this).val();
}).selectpicker();
$("#layout").each(function() {
// Only enable drag / drop if we are within a certain limit
if ($(this).attr("designer_scale") > 0.41) {
$(this).find(".region")
.draggable({
containment: this,
stop: regionPositionUpdate,
drag: updateRegionInfo
})
.resizable({
containment: this,
minWidth: 25,
minHeight: 25,
stop: regionPositionUpdate,
resize: updateRegionInfo
});
}
$(this).find(".region")
.hover(function() {
$(this).find(".regionInfo").show();
$(this).find(".previewNav").show();
},
function() {
$(this).find(".regionInfo").hide();
$(this).find(".previewNav").hide();
});
// Preview
$('.regionPreview', this).each(function(){
new Preview(this);
});
// Set an interval
XiboPing($(this).data('statusUrl'), '.layout-status');
setInterval("XiboPing('" + $(this).data('statusUrl') + "', '.layout-status')", 1000 * 60); // Every minute
});
$('.RegionOptionsMenuItem').click(function(e) {
e.preventDefault();
// If any regions have been moved, then save them.
if ($("#layout-save-all").length > 0) {
SystemMessage(translation.savePositionsFirst, true);
return;
}
var data = {
layoutid: $(this).closest('.region').attr("layoutid"),
regionid: $(this).closest('.region').attr("regionid"),
scale: $(this).closest('.region').attr("tip_scale"),
zoom: $(this).closest('.layout').attr("zoom")
};
var url = $(this).prop("href");
XiboFormRender(url, data);
});
setTimeout(function() {
$(".region .regionInfo").hide("200");
$(".region .previewNav").hide("200");
}, 500);
});
/**
* Update Region Information with Latest Width/Position
* @param {[type]} e [description]
* @param {[type]} ui [description]
* @return {[type]} [description]
*/
function updateRegionInfo(e, ui) {
var pos = $(this).position();
var scale = ($(this).closest('.layout').attr("version") == 1) ? (1 / $(this).attr("tip_scale")) : $(this).attr("designer_scale");
$('.region-tip', this).html(Math.round($(this).width() / scale, 0) + " x " + Math.round($(this).height() / scale, 0) + " (" + Math.round(pos.left / scale, 0) + "," + Math.round(pos.top / scale, 0) + ")");
}
function regionPositionUpdate(e, ui) {
var width = $(this).css("width");
var height = $(this).css("height");
var regionid = $(this).attr("regionid");
// Update the region width / height attributes
$(this).attr("width", width).attr("height", height);
// Update the Preview for the new sizing
var preview = Preview.instances[regionid];
preview.SetSequence(preview.seq);
// Expose a new button to save the positions
if ($("#layout-save-all").length <= 0) {
$("<button/>", {
"class": "btn",
id: "layout-save-all",
html: translation.save_position_button
})
.click(function() {
// Save positions for all layouts / regions
savePositions();
return false;
})
.appendTo(".layout-meta");
$("<button/>", {
"class": "btn",
id: "layout-revert",
html: translation.revert_position_button
})
.click(function() {
// Reload
location.reload();
return false;
})
.appendTo(".layout-meta");
}
}
function savePositions() {
// Ditch the button
$("#layout-save-all").remove();
$("#layout-revert").remove();
// Update all layouts
$("#layout").each(function(){
// Store the Layout ID
var url = $(this).data().positionAllUrl;
// Build an array of
var regions = new Array();
$(this).find(".region").each(function(){
var designer_scale = $(this).attr("designer_scale");
var position = $(this).position();
var region = {
width: $(this).width() / designer_scale,
height: $(this).height() / designer_scale,
top: position.top / designer_scale,
left: position.left / designer_scale,
regionid: $(this).attr("regionid")
};
// Update the region width / height attributes
$(this).attr("width", region.width).attr("height", region.height);
// Add to the array
regions.push(region);
});
$.ajax({
type: "put",
url: url,
cache: false,
dataType: "json",
data: {regions : JSON.stringify(regions) },
success: XiboSubmitResponse
});
});
}
/**
* Sets the layout to full screen
*/
function setFullScreenLayout(width, height) {
$('#width', '.XiboForm').val(width);
$('#height', '.XiboForm').val(height);
$('#top', '.XiboForm').val('0');
$('#left', '.XiboForm').val('0');
}
function refreshPreview(regionId) {
// Refresh the preview
var preview = Preview.instances[regionId];
preview.SetSequence(preview.seq);
}
var loadTimeLineCallback = function(dialog) {
dialog.addClass("modal-big");
refreshPreview($('#timelineControl').attr('regionid'));
$("li.timelineMediaListItem").hover(function() {
var position = $(this).position();
var scale = $('#layout').attr('designer_scale');
// Change the hidden div's content
$("div#timelinePreview")
.html($("div.timelineMediaPreview", this).html())
.css({
"margin-top": position.top + $('#timelineControl').closest('.modal-body').scrollTop()
})
.show();
$("#timelinePreview .hoverPreview").css({
width: $("div#timelinePreview").width() / scale,
transform: "scale(" + scale + ")",
"transform-origin": "0 0 ",
background: $('#layout').css('background-color')
})
}, function() {
return false;
});
$(".timelineSortableListOfMedia").sortable();
// Hook up the library Upload Buttons
$(".libraryUploadForm").click(libraryUploadClick);
};
var XiboTimelineSaveOrder = function(timelineDiv) {
var url = $("#" + timelineDiv).data().orderUrl;
var i = 0;
var widgets = {};
$('#' + timelineDiv + ' li.timelineMediaListItem').each(function() {
i++;
widgets[$(this).attr("widgetid")] = i;
});
console.log(widgets);
// Call the server to do the reorder
$.ajax({
type:"post",
url: url,
cache:false,
dataType:"json",
data:{
"widgets": widgets
},
success: XiboSubmitResponse
});
};
var LibraryAssignSubmit = function() {
// Collect our media
var media = [];
$("#LibraryAssignSortable > li").each(function() {
media.push($(this).data().mediaId);
});
assignMediaToPlaylist($("#LibraryAssign").data().url, media);
};
var assignMediaToPlaylist = function(url, media) {
toastr.info(media, "Assign Media to Playlist");
$.ajax({
type: "post",
url: url,
cache: false,
dataType: "json",
data: {media: media},
success: XiboSubmitResponse
});
};
| Refresh designer preview after reordering media.
| web/theme/default/js/xibo-layout-designer.js | Refresh designer preview after reordering media. | <ide><path>eb/theme/default/js/xibo-layout-designer.js
<ide> data:{
<ide> "widgets": widgets
<ide> },
<del> success: XiboSubmitResponse
<del> });
<add> success: [
<add> XiboSubmitResponse,
<add> afterTimeLineSaveOrder
<add> ]
<add> });
<add>};
<add>
<add>var afterTimeLineSaveOrder = function() {
<add> $('.regionPreview').each(function(idx, el) {
<add> refreshPreview($(el).attr("regionid"));
<add> });
<ide> };
<ide>
<ide> var LibraryAssignSubmit = function() { |
|
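The change recorded above turns the single success handler into an array, relying on jQuery's support for passing several callbacks that are invoked in order with the same response, so the preview refresh runs right after the normal submit handling. A minimal sketch of that pattern follows; it assumes jQuery is loaded as in the file above, and the endpoint, payload and handler names are placeholders rather than Xibo APIs.

// Illustration of jQuery's array-of-callbacks form of the success option.
function handleServerResponse(response) {
	// First handler, e.g. the generic submit-response handling.
	console.log('server said', response);
}

function refreshAfterSave(response) {
	// Second handler, e.g. refreshing the region previews after a reorder.
	console.log('refreshing previews');
}

$.ajax({
	type: 'post',
	url: '/example/reorder',      // placeholder endpoint
	cache: false,
	dataType: 'json',
	data: { example: 'payload' }, // placeholder payload
	success: [
		handleServerResponse,     // runs first
		refreshAfterSave          // runs second, with the same arguments
	]
});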
JavaScript | mit | 0108c398ad0e890d4e7b313410332569071d8d7c | 0 | michaelgira23/MyMICDS-v2,michaelgira23/MyMICDS-v2 | 'use strict';
/**
 * @file Scrapes the lunch from the school website
* @module lunch
*/
var fs = require('fs-extra');
var request = require('request');
var cheerio = require('cheerio');
var utils = require(__dirname + '/utils.js');
var lunchURL = 'http://myschooldining.com/MICDS/calendarWeek';
var schools = ['Lower School', 'Middle School', 'Upper School'];
var JSONPath = __dirname + '/../public/json/weather.json';
/**
* Gets the lunch from /src/api/lunch.json. Will create one if it doesn't already exist.
* @function getLunch
*
* @param {Object} date - Object containing date to retrieve lunch. Leaving fields empty will default to today
* @param {Number} [date.year] - What year to get lunch (Optional. Defaults to current year.)
* @param {Number} [date.month] - Month number to get lunch. (1-12) (Optional. Defaults to current month.)
* @param {Number} [date.day] - Day of month to get lunch. (Optional. Defaults to current day.)
* @param {getLunchCallback} callback - Callback
*/
/**
* Returns JSON containing lunch for week
* @callback getLunchCallback
*
* @param {Object} err - Null if success, error object if failure.
* @param {Object} lunchJSON - JSON of lunch menu for the week. Null if error.
*/
function getLunch(date, callback) {
if(typeof callback !== 'function') return;
var current = new Date();
// Default date to current values
if(typeof date.year !== 'number' || date.year % 1 !== 0) {
date.year = current.getFullYear();
}
if(typeof date.month !== 'number' || date.month % 1 !== 0) {
date.month = current.getMonth() + 1;
}
if(typeof date.day !== 'number' || date.day % 1 !== 0) {
date.day = current.getDate();
}
var currentDay = new Date(date.year, date.month - 1, date.day);
// Send POST request to lunch website
request.post(lunchURL, { form: { 'current_day': currentDay }}, function(err, res, body) {
if(err) {
callback(new Error('There was a problem fetching the lunch data!'), null);
return;
}
if(res.statusCode !== 200) {
/**
* @TODO -
* This should never happen and could mean the URL changed.
* It should send email to MyMICDS Devs.
*/
callback(new Error('There was a problem with the lunch URL!'), null);
return;
}
var lunchJSON = parseLunch(body);
callback(null, lunchJSON);
});
}
/**
* Takes the body of the school's lunch page and returns lunch JSON
* @function parseLunch
*
* @param {string} body - Body of HTML
* @returns {Object}
*/
function parseLunch(body) {
// Clean up HTML to prevent cheerio from becoming confused
body.replace('<<', '<<');
body.replace('>>', '>>');
var $ = cheerio.load(body);
var json = {};
var table = $('table#table_calendar_week');
var weekColumns = table.find('td');
weekColumns.each(function(index) {
var day = $(this);
var date = day.attr('this_date');
var dateObject = new Date(date);
var dateString = dateObject.getFullYear()
+ '-' + utils.leadingZeros(dateObject.getMonth() + 1)
+ '-' + utils.leadingZeros(dateObject.getDate());
for(var i = 0; i < schools.length; i++) {
var school = schools[i];
var schoolLunch = day.find('div[location="' + school + '"]');
// Make sure it's not the weekend
if(schoolLunch.length > 0) {
var lunchTitle = schoolLunch.find('span.period-value').text().trim();
var categories = schoolLunch.find('div.category-week');
categories.each(function() {
var category = $(this);
var food = [];
var categoryTitle = category.find('span.category-value').text().trim();
var items = category.find('div.item-week');
items.each(function() {
food.push($(this).text().trim());
});
// Add to JSON
json[dateString] = json[dateString] || {};
json[dateString][schoolFilter(school)] = json[dateString][schoolFilter(school)] || {};
json[dateString][schoolFilter(school)]['title'] = lunchTitle;
json[dateString][schoolFilter(school)]['categories'] = json[dateString][schoolFilter(school)]['categories'] || {};
json[dateString][schoolFilter(school)]['categories'][categoryTitle] = json[dateString][schoolFilter(school)]['categories'][categoryTitle] || [];
for(var j = 0; j < food.length; j++) {
json[dateString][schoolFilter(school)]['categories'][categoryTitle].push(food[j]);
}
});
}
}
});
return json;
}
/**
* Removes spaces and makes whole string lowercase for JSON
* @function schoolFilter
* @param {string} school - String with school name
* @returns {string}
*/
function schoolFilter(school) {
return school.replace(/\s+/g, '').toLowerCase();
}
module.exports.get = getLunch;
module.exports.parse = parseLunch;
| src/libs/lunch.js | 'use strict';
/**
 * @file Scrapes the lunch from the school website
* @module lunch
*/
var fs = require('fs-extra');
var request = require('request');
var cheerio = require('cheerio');
var utils = require(__dirname + '/utils.js');
var lunchURL = 'http://myschooldining.com/MICDS/calendarWeek';
var schools = ['Lower School', 'Middle School', 'Upper School'];
var JSONPath = __dirname + '/../public/json/weather.json';
/**
* Gets the lunch from /src/api/lunch.json. Will create one if it doesn't already exist.
* @function getLunch
*
* @param {Object} date - Object containing date to retrieve lunch. Leaving fields empty will default to today
* @param {Number} [date.year] - What year to get lunch (Optional. Defaults to current year.)
* @param {Number} [date.month] - Month number to get lunch. (1-12) (Optional. Defaults to current month.)
* @param {Number} [date.day] - Day of month to get lunch. (Optional. Defaults to current day.)
* @param {getLunchCallback} callback - Callback
*/
/**
* Returns JSON containing lunch for week
* @callback getLunchCallback
*
* @param {Object} err - Null if success, error object if failure.
* @param {Object} lunchJSON - JSON of lunch menu for the week. Null if error.
*/
function getLunch(date, callback) {
if(typeof callback !== 'function') return;
var current = new Date();
// Default date to current values
if(typeof date.year !== 'number' || date.year % 1 !== 0) {
date.year = current.getFullYear();
}
if(typeof date.month !== 'number' || date.month % 1 !== 0) {
date.month = current.getMonth() + 1;
}
if(typeof date.day !== 'number' || date.day % 1 !== 0) {
date.day = current.getDate();
}
var currentDay = new Date(date.year, date.month - 1, date.day);
// Send POST request to lunch website
request.post(lunchURL, { form: { 'current_day': currentDay }}, function(err, res, body) {
if(err) {
callback(new Error('There was a problem fetching the lunch data!'), null);
return;
}
if(res.statusCode !== 200) {
/**
* @TODO -
* This should never happen and could mean the URL changed.
* It should send email to MyMICDS Devs.
*/
callback(new Error('There was a problem with the lunch URL!'), null);
return;
}
var lunchJSON = parseLunch(body);
callback(null, lunchJSON);
});
}
/**
* Takes the body of the school's lunch page and returns lunch JSON
* @function parseLunch
*
* @param {string} body - Body of HTML
* @returns {Object}
*/
function parseLunch(body) {
// Clean up HTML to prevent cheerio from becoming confused
body.replace('<<', '<<');
body.replace('>>', '>>');
var $ = cheerio.load(body);
var json = {};
var table = $('table#table_calendar_week');
var weekColumns = table.find('td');
weekColumns.each(function(index) {
var day = $(this);
var date = day.attr('day_no');
var dateObject = new Date(date);
var dateString = dateObject.getFullYear()
+ '-' + utils.leadingZeros(dateObject.getMonth() + 1)
+ '-' + utils.leadingZeros(dateObject.getDate());
for(var i = 0; i < schools.length; i++) {
var school = schools[i];
var schoolLunch = day.find('div[location="' + school + '"]');
// Make sure it's not the weekend
if(schoolLunch.length > 0) {
var lunchTitle = schoolLunch.find('span.period-value').text().trim();
var categories = schoolLunch.find('div.category-week');
categories.each(function() {
var category = $(this);
var food = [];
var categoryTitle = category.find('span.category-value').text().trim();
var items = category.find('div.item-week');
items.each(function() {
food.push($(this).text().trim());
});
// Add to JSON
json[dateString] = json[dateString] || {};
json[dateString][schoolFilter(school)] = json[dateString][schoolFilter(school)] || {};
json[dateString][schoolFilter(school)]['title'] = lunchTitle;
json[dateString][schoolFilter(school)]['categories'] = json[dateString][schoolFilter(school)]['categories'] || {};
json[dateString][schoolFilter(school)]['categories'][categoryTitle] = json[dateString][schoolFilter(school)]['categories'][categoryTitle] || [];
for(var j = 0; j < food.length; j++) {
json[dateString][schoolFilter(school)]['categories'][categoryTitle].push(food[j]);
}
});
}
}
});
return json;
}
/**
* Removes spaces and makes whole string lowercase for JSON
* @function schoolFilter
* @param {string} school - String with school name
* @returns {string}
*/
function schoolFilter(school) {
return school.replace(/\s+/g, '').toLowerCase();
}
module.exports.get = getLunch;
module.exports.parse = parseLunch;
 | Fix date parsing for lunch scraper
 | src/libs/lunch.js | Fix date parsing for lunch scraper | <ide><path>rc/libs/lunch.js
<ide> */
<ide>
<ide> function parseLunch(body) {
<del>
<ide> // Clean up HTML to prevent cheerio from becoming confused
<ide> body.replace('<<', '<<');
<ide> body.replace('>>', '>>');
<ide> weekColumns.each(function(index) {
<ide>
<ide> var day = $(this);
<del> var date = day.attr('day_no');
<add> var date = day.attr('this_date');
<ide> var dateObject = new Date(date);
<ide> var dateString = dateObject.getFullYear()
<ide> + '-' + utils.leadingZeros(dateObject.getMonth() + 1) |
|
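The fix recorded above reads each day column's this_date attribute instead of day_no before building the YYYY-MM-DD key used in the lunch JSON. The short sketch below isolates that attribute lookup with cheerio; the sample HTML and the local leadingZeros helper are assumptions made for illustration (the real code uses utils.leadingZeros and the school menu's own markup).

// Standalone look at the attribute-based date parsing from parseLunch.
var cheerio = require('cheerio');

// Assumed stand-in for utils.leadingZeros: pad a number to two digits.
function leadingZeros(n) {
	return n < 10 ? '0' + n : '' + n;
}

// Hypothetical markup shaped like the menu table parseLunch walks over.
var html =
	'<table id="table_calendar_week"><tr>' +
	'<td this_date="2016/04/18"></td>' +
	'<td this_date="2016/04/19"></td>' +
	'</tr></table>';

var $ = cheerio.load(html);

$('table#table_calendar_week').find('td').each(function() {
	var date = $(this).attr('this_date');
	var dateObject = new Date(date);
	var dateString = dateObject.getFullYear()
		+ '-' + leadingZeros(dateObject.getMonth() + 1)
		+ '-' + leadingZeros(dateObject.getDate());
	console.log(dateString); // 2016-04-18, then 2016-04-19
});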
Java | mit | 7be9da03423bbc5acb71d2f024c5d948dfee798e | 0 | InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service | package org.innovateuk.ifs.assessment.period.transactional;
import org.innovateuk.ifs.commons.resource.PageResource;
import org.innovateuk.ifs.commons.security.SecuredBySpring;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.competition.resource.AssessmentPeriodResource;
import org.innovateuk.ifs.crud.IfsCrudService;
import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import java.util.List;
/**
* Service for operations around the usage and processing of AssessmentPeriod
*/
public interface AssessmentPeriodService extends IfsCrudService<AssessmentPeriodResource, Long> {
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<AssessmentPeriodResource> get(Long aLong);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<List<AssessmentPeriodResource>> get(List<Long> longs);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<AssessmentPeriodResource> update(Long aLong, AssessmentPeriodResource assessmentPeriodResource);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<Void> delete(Long aLong);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<AssessmentPeriodResource> create(AssessmentPeriodResource assessmentPeriodResource);
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only Comp Admins are able to get the assessment periods for the given competitions")
ServiceResult<List<AssessmentPeriodResource>> getAssessmentPeriodByCompetitionId(long competitionId);
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only Comp Admins are able to get the assessment periods for the given competitions")
ServiceResult<PageResource<AssessmentPeriodResource>> getAssessmentPeriodByCompetitionId(long competitionId, Pageable page);
}
| ifs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/assessment/period/transactional/AssessmentPeriodService.java | package org.innovateuk.ifs.assessment.period.transactional;
import org.innovateuk.ifs.commons.resource.PageResource;
import org.innovateuk.ifs.commons.security.SecuredBySpring;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.competition.resource.AssessmentPeriodResource;
import org.innovateuk.ifs.crud.IfsCrudService;
import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import java.util.List;
/**
* Service for operations around the usage and processing of AssessmentPeriod
*/
public interface AssessmentPeriodService extends IfsCrudService<AssessmentPeriodResource, Long> {
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<AssessmentPeriodResource> get(Long aLong);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<List<AssessmentPeriodResource>> get(List<Long> longs);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<AssessmentPeriodResource> update(Long aLong, AssessmentPeriodResource assessmentPeriodResource);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<Void> delete(Long aLong);
@Override
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only comp admins can perform actions on assessment periods")
ServiceResult<AssessmentPeriodResource> create(AssessmentPeriodResource assessmentPeriodResource);
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only Comp Admins are able to get the assessment periods for the given competitions")
ServiceResult<List<AssessmentPeriodResource>> getAssessmentPeriodByCompetitionId(long competitionId);
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only Comp Admins are able to get the assessment periods for the given competitions")
@PreAuthorize("hasAuthority('comp_admin')")
@SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
description = "Only Comp Admins are able to get the assessment periods for the given competitions")
ServiceResult<PageResource<AssessmentPeriodResource>> getAssessmentPeriodByCompetitionId(long competitionId, Pageable page);
}
| IFS-9008 merge conflict resolve
| ifs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/assessment/period/transactional/AssessmentPeriodService.java | IFS-9008 merge conflict resolve | <ide><path>fs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/assessment/period/transactional/AssessmentPeriodService.java
<ide> @PreAuthorize("hasAuthority('comp_admin')")
<ide> @SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
<ide> description = "Only Comp Admins are able to get the assessment periods for the given competitions")
<del>
<del> @PreAuthorize("hasAuthority('comp_admin')")
<del> @SecuredBySpring(value="READ", securedType= AssessmentPeriodResource.class,
<del> description = "Only Comp Admins are able to get the assessment periods for the given competitions")
<ide> ServiceResult<PageResource<AssessmentPeriodResource>> getAssessmentPeriodByCompetitionId(long competitionId, Pageable page);
<ide> } |
|
Java | epl-1.0 | f31aa9268f322a70262cf38bc95deefe426d8bbb | 0 | elexis/elexis-3-core,sazgin/elexis-3-core,elexis/elexis-3-core,elexis/elexis-3-core,elexis/elexis-3-core,sazgin/elexis-3-core,sazgin/elexis-3-core,sazgin/elexis-3-core | /*******************************************************************************
* Copyright (c) 2013 MEDEVIT <[email protected]>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* MEDEVIT <[email protected]> - initial API and implementation
******************************************************************************/
package ch.elexis.core.data.activator;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.ResourceBundle;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.elexis.Desk;
import ch.elexis.core.constants.Preferences;
import ch.elexis.core.constants.StringConstants;
import ch.elexis.core.data.Anwender;
import ch.elexis.core.data.Mandant;
import ch.elexis.core.data.PersistentObject;
import ch.elexis.core.data.PersistentObjectFactory;
import ch.elexis.core.data.Query;
import ch.elexis.core.data.admin.AccessControl;
import ch.elexis.core.data.events.ElexisEvent;
import ch.elexis.core.data.events.ElexisEventDispatcher;
import ch.elexis.core.data.events.Heartbeat;
import ch.elexis.core.data.events.Heartbeat.HeartListener;
import ch.elexis.core.data.events.PatientEventListener;
import ch.elexis.core.data.extension.CoreOperationExtensionPoint;
import ch.elexis.core.data.interfaces.ShutdownJob;
import ch.elexis.core.data.interfaces.events.MessageEvent;
import ch.elexis.core.data.preferences.CorePreferenceInitializer;
import ch.elexis.core.data.util.PlatformHelper;
import ch.rgw.io.LockFile;
import ch.rgw.io.Resource;
import ch.rgw.io.Settings;
import ch.rgw.io.SqlSettings;
import ch.rgw.io.SysSettings;
import ch.rgw.tools.Log;
import ch.rgw.tools.StringTool;
import ch.rgw.tools.VersionInfo;
/**
* @since 3.0.0
*/
public class CoreHub implements BundleActivator {
public static final String PLUGIN_ID = "ch.elexis.core.data";
/*
* This version is needed to compare the DB
*/
public static String Version = "3.0.0.qualifier"; //$NON-NLS-1$
public static final String APPLICATION_NAME = "Elexis Core"; //$NON-NLS-1$
static final String neededJRE = "1.7.0"; //$NON-NLS-1$
public static final String DBVersion = "1.8.16"; //$NON-NLS-1$
protected static Logger log = LoggerFactory.getLogger(CoreHub.class
.getName());
private static String LocalCfgFile = null;
private BundleContext context;
/** Das Singleton-Objekt dieser Klasse */
public static CoreHub plugin;
private static List<ShutdownJob> shutdownJobs = new LinkedList<ShutdownJob>();
/** Factory für interne PersistentObjects */
public static final PersistentObjectFactory poFactory = new PersistentObjectFactory();
/** Heartbeat */
public static Heartbeat heart;
/**
* Beschreibbares Verzeichnis für userspezifische Konfigurationsdaten etc.
* Achtung: "User" meint hier: den eingeloggten Betriebssystem-User, nicht
* den Elexis-User. In Windows wird userDir meist %USERPROFILE%\elexis sein,
* in Linux ~./elexis. Es kann mit getWritableUserDir() geholt werden.
* */
static File userDir;
/** Globale Einstellungen (Werden in der Datenbank gespeichert) */
public static Settings globalCfg;
/** Lokale Einstellungen (Werden in der Registry bzw. ~/.java gespeichert) */
public static Settings localCfg;
/** Anwenderspezifische Einstellungen (Werden in der Datenbank gespeichert) */
public static Settings userCfg;
/** Mandantspezifische EInstellungen (Werden in der Datenbank gespeichert) */
public static Settings mandantCfg;
public static Anwender actUser; // TODO set
public static Mandant actMandant; // TODO set
/** Der Initialisierer für die Voreinstellungen */
public static final CorePreferenceInitializer pin = new CorePreferenceInitializer();
/** Die zentrale Zugriffskontrolle */
public static final AccessControl acl = new AccessControl();
/**
* The listener for patient events
*/
private final PatientEventListener eeli_pat = new PatientEventListener();
/**
* get the base directory of this currently running elexis application
*
* @return the topmost directory of this application or null if this
* information could not be retrieved
*/
public static String getBasePath() {
return PlatformHelper.getBasePath(PLUGIN_ID);
}
/**
* Return a directory suitable for temporary files. Most probably this will
* be a default tempdir provided by the os. If none such exists, it will be
* the user dir.
*
* @return always a valid and writable directory.
*/
public static File getTempDir() {
File ret = null;
String temp = System.getProperty("java.io.tmpdir"); //$NON-NLS-1$
if (!StringTool.isNothing(temp)) {
ret = new File(temp);
if (ret.exists() && ret.isDirectory()) {
return ret;
} else {
if (ret.mkdirs()) {
return ret;
}
}
}
return getWritableUserDir();
}
/**
* return a directory suitable for plugin specific configuration data. If no
* such dir exists, it will be created. If it could not be created, the
* application will refuse to start.
*
* @return a directory that exists always and is always writable and
* readable for plugins of the currently running elexis instance.
* Caution: this directory is not necessarily shared among different
* OS-Users. In Windows it is normally %USERPROFILE%\elexis, in
* Linux ~./elexis
*/
public static File getWritableUserDir() {
if (userDir == null) {
String userhome = null;
if (localCfg != null) {
userhome = localCfg.get("elexis-userDir", null); //$NON-NLS-1$
}
if (userhome == null) {
userhome = System.getProperty("user.home"); //$NON-NLS-1$
}
if (StringTool.isNothing(userhome)) {
userhome = System.getProperty("java.io.tempdir"); //$NON-NLS-1$
}
userDir = new File(userhome, "elexis"); //$NON-NLS-1$
}
if (!userDir.exists()) {
if (!userDir.mkdirs()) {
System.err.print("fatal: could not create Userdir"); //$NON-NLS-1$
MessageEvent
.fireLoggedError(
"Panic exit",
"could not create userdir "
+ userDir.getAbsolutePath());
System.exit(-5);
}
}
return userDir;
}
@Override
public void start(BundleContext context) throws Exception {
this.context = context;
log.debug("Starting "+CoreHub.class.getName());
plugin = this;
// Check if we "are complete" - throws Error if not
CoreOperationExtensionPoint.getCoreOperationAdvisor();
startUpBundle();
setUserDir(userDir);
heart = Heartbeat.getInstance();
ElexisEventDispatcher.getInstance().addListeners(eeli_pat);
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run(){
SysSettings localCfg = (SysSettings) CoreHub.localCfg;
localCfg.write_xml(LocalCfgFile);
}
});
}
/*
* We use maven resources filtering to replace, in rsc/version.properties, the
* property ${pom.version} with the current build version, and to place the result
* into the file /version.properties of each jar file.
*
* See http://maven.apache.org
* /plugins/maven-resources-plugin/examples/filter.html
*/
public static String readElexisBuildVersion() {
Properties prop = new Properties();
String elexis_version = "Developer";
String qualifier = " ?? ";
try {
URL url;
url = new URL(
"platform:/plugin/ch.elexis.core.data/version.properties");
InputStream inputStream = url.openConnection().getInputStream();
if (inputStream != null) {
prop.load(inputStream);
elexis_version = prop.getProperty("elexis.version");
qualifier = prop.getProperty("elexis.qualifier");
}
} catch (IOException e) {
log.warn("Error reading build version information. ", e);
}
return elexis_version.replace("-SNAPSHOT", "") + " " + qualifier;
}
@Override
public void stop(BundleContext context) throws Exception {
log.debug("Stopping " + CoreHub.class.getName());
if (CoreHub.actUser != null) {
Anwender.logoff();
}
PersistentObject.disconnect();
ElexisEventDispatcher.getInstance().removeListeners(eeli_pat);
ElexisEventDispatcher.getInstance().dump();
globalCfg = null;
heart.stop();
plugin = null;
this.context = null;
}
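/** Evaluates the --use-config argument, loads the local configuration, checks the Java version and initializes preferences, the heartbeat and the lock file. */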
private void startUpBundle() {
String[] args = Platform.getApplicationArgs();
String config = "default"; //$NON-NLS-1$
for (String s : args) {
if (s.startsWith("--use-config=")) { //$NON-NLS-1$
String[] c = s.split("="); //$NON-NLS-1$
config = c[1];
}
}
loadLocalCfg(config);
// Damit Anfragen auf userCfg und mandantCfg bei nicht eingeloggtem User
// keine NPE werfen
userCfg = localCfg;
mandantCfg = localCfg;
// Java Version prüfen
VersionInfo vI = new VersionInfo(System.getProperty(
"java.version", "0.0.0")); //$NON-NLS-1$ //$NON-NLS-2$
log.info(getId() + "; Java: " + vI.version() + "\nencoding: "
+ System.getProperty("file.encoding"));
if (vI.isOlder(neededJRE)) {
MessageEvent.fireLoggedError("Invalid Java version",
"Your Java version is older than " + neededJRE
+ ", please update.");
}
log.info("Basepath: " + getBasePath());
pin.initializeDefaultPreferences();
heart = Heartbeat.getInstance();
initializeLock();
}
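/** Acquires the elexislock file in the user directory and keeps it updated via a heartbeat listener; exits if too many concurrent instances are detected. */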
private static void initializeLock() {
final int timeoutSeconds = 600;
try {
final LockFile lockfile = new LockFile(userDir,
"elexislock", 4, timeoutSeconds); //$NON-NLS-1$
final int n = lockfile.lock();
if (n == 0) {
MessageEvent
.fireLoggedError("Too many instances",
"Too many concurrent instances of Elexis running. Will exit.");
log.error("Too many concurent instances. Check elexis.lock files");
System.exit(2);
} else {
HeartListener lockListener = new HeartListener() {
long timeSet;
public void heartbeat() {
long now = System.currentTimeMillis();
if ((now - timeSet) > timeoutSeconds) {
lockfile.updateLock(n);
timeSet = now;
}
}
};
heart.addListener(lockListener, Heartbeat.FREQUENCY_LOW);
}
} catch (IOException ex) {
log.error("Can not aquire lock file in " + userDir + "; " + ex.getMessage()); //$NON-NLS-1$
}
}
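/** Builds an identification string with the application name, version, revision and OS name/version/architecture. */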
public static String getId() {
StringBuilder sb = new StringBuilder();
sb.append(APPLICATION_NAME).append(" v.").append(Version).append("\n")
.append(CoreHubHelper.getRevision(true, plugin)).append("\n")
.append(System.getProperty("os.name"))
.append(StringConstants.SLASH)
.append(System.getProperty("os.version"))
.append(StringConstants.SLASH)
.append(System.getProperty("os.arch")); //$NON-NLS-1$
return sb.toString();
}
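/** Loads the local configuration for the given branch from an XML file in the writable user directory. */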
private void loadLocalCfg(String branch) {
LocalCfgFile = CoreHubHelper.getWritableUserDir() + "/localCfg_"
+ branch + ".xml";
log.debug("Loading branch "+branch+" from " +LocalCfgFile +" as localCfg");
SysSettings cfg = new SysSettings(SysSettings.USER_SETTINGS, Desk.class);
cfg.read_xml(LocalCfgFile);
CoreHub.localCfg = cfg;
}
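/** Sets the active mandator, flushing the previous mandator's settings and firing an EVENT_MANDATOR_CHANGED event. */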
public static void setMandant(Mandant newMandant) {
if (actMandant != null) {
mandantCfg.flush();
}
if(newMandant == null) {
mandantCfg = userCfg;
} else {
mandantCfg =
new SqlSettings(PersistentObject.getConnection(),
"USERCONFIG", "Param", "Value", "UserID=" + newMandant.getWrappedId());
}
actMandant = newMandant;
ElexisEventDispatcher.getInstance().fire(
new ElexisEvent(CoreHub.actMandant, Mandant.class, ElexisEvent.EVENT_MANDATOR_CHANGED));
}
public Bundle getBundle() {
return context.getBundle();
}
/**
* get a list of all mandators known to this system
*/
public static List<Mandant> getMandantenList(){
Query<Mandant> qbe = new Query<Mandant>(Mandant.class);
return qbe.execute();
}
/**
* get a list of all users known to this system
*/
public static List<Anwender> getUserList(){
Query<Anwender> qbe = new Query<Anwender>(Anwender.class);
return qbe.execute();
}
/**
* Return the name of a config instance, the user chose. This is just the valuie of the
* -Dconfig=xx runtime value or "default" if no -Dconfig was set
*/
public static String getCfgVariant(){
String config = System.getProperty("config");
return config == null ? "default" : config;
}
public void setUserDir(File dir){
userDir = dir;
localCfg.set("elexis-userDir", dir.getAbsolutePath()); //$NON-NLS-1$
}
/**
* Add a ShutdownJob to the list of jobs that has to be done after the Elexis workbench was shut
* down.
*
* @param job
*/
public static void addShutdownJob(final ShutdownJob job){
if (!shutdownJobs.contains(job)) {
shutdownJobs.add(job);
}
}
public static int getSystemLogLevel(){
return localCfg.get(Preferences.ABL_LOGLEVEL, Log.ERRORS);
}
}
| ch.elexis.core.data/src/ch/elexis/core/data/activator/CoreHub.java | /*******************************************************************************
* Copyright (c) 2013 MEDEVIT <[email protected]>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* MEDEVIT <[email protected]> - initial API and implementation
******************************************************************************/
package ch.elexis.core.data.activator;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.ResourceBundle;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.elexis.Desk;
import ch.elexis.core.constants.Preferences;
import ch.elexis.core.constants.StringConstants;
import ch.elexis.core.data.Anwender;
import ch.elexis.core.data.Mandant;
import ch.elexis.core.data.PersistentObject;
import ch.elexis.core.data.PersistentObjectFactory;
import ch.elexis.core.data.Query;
import ch.elexis.core.data.admin.AccessControl;
import ch.elexis.core.data.events.ElexisEvent;
import ch.elexis.core.data.events.ElexisEventDispatcher;
import ch.elexis.core.data.events.Heartbeat;
import ch.elexis.core.data.events.Heartbeat.HeartListener;
import ch.elexis.core.data.events.PatientEventListener;
import ch.elexis.core.data.extension.CoreOperationExtensionPoint;
import ch.elexis.core.data.interfaces.ShutdownJob;
import ch.elexis.core.data.interfaces.events.MessageEvent;
import ch.elexis.core.data.preferences.CorePreferenceInitializer;
import ch.elexis.core.data.util.PlatformHelper;
import ch.rgw.io.LockFile;
import ch.rgw.io.Resource;
import ch.rgw.io.Settings;
import ch.rgw.io.SqlSettings;
import ch.rgw.io.SysSettings;
import ch.rgw.tools.Log;
import ch.rgw.tools.StringTool;
import ch.rgw.tools.VersionInfo;
/**
* @since 3.0.0
*/
public class CoreHub implements BundleActivator {
public static final String PLUGIN_ID = "ch.elexis.core.data";
/*
* This version is needed to compare the DB
*/
public static String Version = "3.0.0.qualifier"; //$NON-NLS-1$
public static final String APPLICATION_NAME = "Elexis Core"; //$NON-NLS-1$
static final String neededJRE = "1.7.0"; //$NON-NLS-1$
public static final String DBVersion = "1.8.16"; //$NON-NLS-1$
protected static Logger log = LoggerFactory.getLogger(CoreHub.class
.getName());
private static String LocalCfgFile = null;
private BundleContext context;
/** Das Singleton-Objekt dieser Klasse */
public static CoreHub plugin;
private static List<ShutdownJob> shutdownJobs = new LinkedList<ShutdownJob>();
/** Factory für interne PersistentObjects */
public static final PersistentObjectFactory poFactory = new PersistentObjectFactory();
/** Heartbeat */
public static Heartbeat heart;
/**
* Beschreibbares Verzeichnis für userspezifische Konfigurationsdaten etc.
* Achtung: "User" meint hier: den eingeloggten Betriebssystem-User, nicht
* den Elexis-User. In Windows wird userDir meist %USERPROFILE%\elexis sein,
* in Linux ~./elexis. Es kann mit getWritableUserDir() geholt werden.
* */
static File userDir;
/** Globale Einstellungen (Werden in der Datenbank gespeichert) */
public static Settings globalCfg;
/** Lokale Einstellungen (Werden in der Registry bzw. ~/.java gespeichert) */
public static Settings localCfg;
/** Anwenderspezifische Einstellungen (Werden in der Datenbank gespeichert) */
public static Settings userCfg;
/** Mandantspezifische EInstellungen (Werden in der Datenbank gespeichert) */
public static Settings mandantCfg;
public static Anwender actUser; // TODO set
public static Mandant actMandant; // TODO set
/** Der Initialisierer für die Voreinstellungen */
public static final CorePreferenceInitializer pin = new CorePreferenceInitializer();
/** Die zentrale Zugriffskontrolle */
public static final AccessControl acl = new AccessControl();
/**
* The listener for patient events
*/
private final PatientEventListener eeli_pat = new PatientEventListener();
/**
* get the base directory of this currently running elexis application
*
* @return the topmost directory of this application or null if this
* information could not be retrieved
*/
public static String getBasePath() {
return PlatformHelper.getBasePath(PLUGIN_ID);
}
/**
* Return a directory suitable for temporary files. Most probably this will
* be a default tempdir provided by the os. If none such exists, it will be
* the user dir.
*
* @return always a valid and writable directory.
*/
public static File getTempDir() {
File ret = null;
String temp = System.getProperty("java.io.tmpdir"); //$NON-NLS-1$
if (!StringTool.isNothing(temp)) {
ret = new File(temp);
if (ret.exists() && ret.isDirectory()) {
return ret;
} else {
if (ret.mkdirs()) {
return ret;
}
}
}
return getWritableUserDir();
}
/**
* return a directory suitable for plugin specific configuration data. If no
* such dir exists, it will be created. If it could not be created, the
* application will refuse to start.
*
* @return a directory that exists always and is always writable and
* readable for plugins of the currently running elexis instance.
* Caution: this directory is not necessarily shared among different
* OS-Users. In Windows it is normally %USERPROFILE%\elexis, in
* Linux ~./elexis
*/
public static File getWritableUserDir() {
if (userDir == null) {
String userhome = null;
if (localCfg != null) {
userhome = localCfg.get("elexis-userDir", null); //$NON-NLS-1$
}
if (userhome == null) {
userhome = System.getProperty("user.home"); //$NON-NLS-1$
}
if (StringTool.isNothing(userhome)) {
userhome = System.getProperty("java.io.tempdir"); //$NON-NLS-1$
}
userDir = new File(userhome, "elexis"); //$NON-NLS-1$
}
if (!userDir.exists()) {
if (!userDir.mkdirs()) {
System.err.print("fatal: could not create Userdir"); //$NON-NLS-1$
MessageEvent
.fireLoggedError(
"Panic exit",
"could not create userdir "
+ userDir.getAbsolutePath());
System.exit(-5);
}
}
return userDir;
}
@Override
public void start(BundleContext context) throws Exception {
this.context = context;
log.debug("Starting "+CoreHub.class.getName());
plugin = this;
// Check if we "are complete" - throws Error if not
CoreOperationExtensionPoint.getCoreOperationAdvisor();
startUpBundle();
setUserDir(userDir);
heart = Heartbeat.getInstance();
ElexisEventDispatcher.getInstance().addListeners(eeli_pat);
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run(){
SysSettings localCfg = (SysSettings) CoreHub.localCfg;
localCfg.write_xml(LocalCfgFile);
}
});
}
/*
* We use maven resources filtering to replace, in rsc/version.properties, the
* property ${pom.version} with the current build version, and to place the result
* into the file /version.properties of each jar file.
*
* See http://maven.apache.org
* /plugins/maven-resources-plugin/examples/filter.html
*/
public static String readElexisBuildVersion() {
Properties prop = new Properties();
String elexis_version = "Developer";
String qualifier = " ?? ";
try {
URL url;
url = new URL(
"platform:/plugin/ch.elexis.core.data/version.properties");
InputStream inputStream = url.openConnection().getInputStream();
if (inputStream != null) {
prop.load(inputStream);
elexis_version = prop.getProperty("elexis.version");
qualifier = prop.getProperty("elexis.qualifier");
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return elexis_version.replace("-SNAPSHOT", "") + " " + qualifier;
}
@Override
public void stop(BundleContext context) throws Exception {
log.debug("Stopping " + CoreHub.class.getName());
if (CoreHub.actUser != null) {
Anwender.logoff();
}
PersistentObject.disconnect();
ElexisEventDispatcher.getInstance().removeListeners(eeli_pat);
ElexisEventDispatcher.getInstance().dump();
globalCfg = null;
heart.stop();
plugin = null;
this.context = null;
}
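/** Evaluates the --use-config argument, loads the local configuration, checks the Java version and initializes preferences, the heartbeat and the lock file. */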
private void startUpBundle() {
String[] args = Platform.getApplicationArgs();
String config = "default"; //$NON-NLS-1$
for (String s : args) {
if (s.startsWith("--use-config=")) { //$NON-NLS-1$
String[] c = s.split("="); //$NON-NLS-1$
config = c[1];
}
}
loadLocalCfg(config);
// Damit Anfragen auf userCfg und mandantCfg bei nicht eingeloggtem User
// keine NPE werfen
userCfg = localCfg;
mandantCfg = localCfg;
// Java Version prüfen
VersionInfo vI = new VersionInfo(System.getProperty(
"java.version", "0.0.0")); //$NON-NLS-1$ //$NON-NLS-2$
log.info(getId() + "; Java: " + vI.version() + "\nencoding: "
+ System.getProperty("file.encoding"));
if (vI.isOlder(neededJRE)) {
MessageEvent.fireLoggedError("Invalid Java version",
"Your Java version is older than " + neededJRE
+ ", please update.");
}
log.info("Basepath: " + getBasePath());
pin.initializeDefaultPreferences();
heart = Heartbeat.getInstance();
initializeLock();
}
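/** Acquires the elexislock file in the user directory and keeps it updated via a heartbeat listener; exits if too many concurrent instances are detected. */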
private static void initializeLock() {
final int timeoutSeconds = 600;
try {
final LockFile lockfile = new LockFile(userDir,
"elexislock", 4, timeoutSeconds); //$NON-NLS-1$
final int n = lockfile.lock();
if (n == 0) {
MessageEvent
.fireLoggedError("Too many instances",
"Too many concurrent instances of Elexis running. Will exit.");
log.error("Too many concurent instances. Check elexis.lock files");
System.exit(2);
} else {
HeartListener lockListener = new HeartListener() {
long timeSet;
public void heartbeat() {
long now = System.currentTimeMillis();
if ((now - timeSet) > timeoutSeconds) {
lockfile.updateLock(n);
timeSet = now;
}
}
};
heart.addListener(lockListener, Heartbeat.FREQUENCY_LOW);
}
} catch (IOException ex) {
log.error("Can not aquire lock file in " + userDir + "; " + ex.getMessage()); //$NON-NLS-1$
}
}
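/** Builds an identification string with the application name, version, revision and OS name/version/architecture. */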
public static String getId() {
StringBuilder sb = new StringBuilder();
sb.append(APPLICATION_NAME).append(" v.").append(Version).append("\n")
.append(CoreHubHelper.getRevision(true, plugin)).append("\n")
.append(System.getProperty("os.name"))
.append(StringConstants.SLASH)
.append(System.getProperty("os.version"))
.append(StringConstants.SLASH)
.append(System.getProperty("os.arch")); //$NON-NLS-1$
return sb.toString();
}
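/** Loads the local configuration for the given branch from an XML file in the writable user directory. */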
private void loadLocalCfg(String branch) {
LocalCfgFile = CoreHubHelper.getWritableUserDir() + "/localCfg_"
+ branch + ".xml";
log.debug("Loading branch "+branch+" from " +LocalCfgFile +" as localCfg");
SysSettings cfg = new SysSettings(SysSettings.USER_SETTINGS, Desk.class);
cfg.read_xml(LocalCfgFile);
CoreHub.localCfg = cfg;
}
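/** Sets the active mandator, flushing the previous mandator's settings and firing an EVENT_MANDATOR_CHANGED event. */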
public static void setMandant(Mandant newMandant) {
if (actMandant != null) {
mandantCfg.flush();
}
if(newMandant == null) {
mandantCfg = userCfg;
} else {
mandantCfg =
new SqlSettings(PersistentObject.getConnection(),
"USERCONFIG", "Param", "Value", "UserID=" + newMandant.getWrappedId());
}
actMandant = newMandant;
ElexisEventDispatcher.getInstance().fire(
new ElexisEvent(CoreHub.actMandant, Mandant.class, ElexisEvent.EVENT_MANDATOR_CHANGED));
}
public Bundle getBundle() {
return context.getBundle();
}
/**
* get a list of all mandators known to this system
*/
public static List<Mandant> getMandantenList(){
Query<Mandant> qbe = new Query<Mandant>(Mandant.class);
return qbe.execute();
}
/**
* get a list of all users known to this system
*/
public static List<Anwender> getUserList(){
Query<Anwender> qbe = new Query<Anwender>(Anwender.class);
return qbe.execute();
}
/**
* Return the name of a config instance, the user chose. This is just the valuie of the
* -Dconfig=xx runtime value or "default" if no -Dconfig was set
*/
public static String getCfgVariant(){
String config = System.getProperty("config");
return config == null ? "default" : config;
}
public void setUserDir(File dir){
userDir = dir;
localCfg.set("elexis-userDir", dir.getAbsolutePath()); //$NON-NLS-1$
}
/**
* Add a ShutdownJob to the list of jobs that has to be done after the Elexis workbench was shut
* down.
*
* @param job
*/
public static void addShutdownJob(final ShutdownJob job){
if (!shutdownJobs.contains(job)) {
shutdownJobs.add(job);
}
}
public static int getSystemLogLevel(){
return localCfg.get(Preferences.ABL_LOGLEVEL, Log.ERRORS);
}
}
| Don't stacktrace on missing version.properties, log.warn suffices | ch.elexis.core.data/src/ch/elexis/core/data/activator/CoreHub.java | Don't stacktrace on missing version.properties, log.warn suffices | <ide><path>h.elexis.core.data/src/ch/elexis/core/data/activator/CoreHub.java
<ide> qualifier = prop.getProperty("elexis.qualifier");
<ide> }
<ide> } catch (IOException e) {
<del> // TODO Auto-generated catch block
<del> e.printStackTrace();
<add> log.warn("Error reading build version information. ", e);
<ide> }
<ide> return elexis_version.replace("-SNAPSHOT", "") + " " + qualifier;
<ide> } |
|
Java | apache-2.0 | 2e1ccba2ca81432456b994d80f1727ae7fb643c8 | 0 | RaffaelBild/arx,jgaupp/arx,bitraten/arx,kentoa/arx,kbabioch/arx,arx-deidentifier/arx,jgaupp/arx,fstahnke/arx,TheRealRasu/arx,tijanat/arx,arx-deidentifier/arx,kbabioch/arx,fstahnke/arx,TheRealRasu/arx,RaffaelBild/arx,COWYARD/arx,COWYARD/arx,tijanat/arx,kentoa/arx,bitraten/arx | /*
* ARX: Powerful Data Anonymization
* Copyright (C) 2012 - 2014 Florian Kohlmayer, Fabian Prasser
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.deidentifier.arx.gui.view.impl.explore;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.deidentifier.arx.ARXLattice;
import org.deidentifier.arx.ARXLattice.ARXNode;
import org.deidentifier.arx.ARXResult;
import org.deidentifier.arx.gui.Controller;
import org.deidentifier.arx.gui.model.Model;
import org.deidentifier.arx.gui.model.ModelEvent;
import org.deidentifier.arx.gui.model.ModelEvent.ModelPart;
import org.deidentifier.arx.gui.model.ModelNodeFilter;
import org.deidentifier.arx.gui.resources.Resources;
import org.deidentifier.arx.gui.view.SWTUtil;
import org.deidentifier.arx.gui.view.def.IView;
import org.deidentifier.arx.metric.InformationLoss;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import cern.colt.Arrays;
/**
* This class implements a list view on selected nodes.
* TODO: Highlight optimum and currently selected node in list
*
* @author prasser
*/
public class ViewList implements IView {
/** The controller */
private final Controller controller;
/** The format */
private final NumberFormat format = new DecimalFormat("##0.000"); //$NON-NLS-1$
/** The table */
private final Table table;
/** The model */
private Model model;
/** The list */
private final List<ARXNode> list = new ArrayList<ARXNode>();
/** The listener */
private Listener listener;
/**
* Init
*
* @param parent
* @param controller
*/
public ViewList(final Composite parent, final Controller controller) {
// Listen
controller.addListener(ModelPart.SELECTED_NODE, this);
controller.addListener(ModelPart.FILTER, this);
controller.addListener(ModelPart.MODEL, this);
controller.addListener(ModelPart.RESULT, this);
this.controller = controller;
table = new Table(parent, SWT.SINGLE | SWT.VIRTUAL | SWT.BORDER | SWT.V_SCROLL | SWT.FULL_SELECTION);
table.setLayoutData(SWTUtil.createFillGridData());
table.setHeaderVisible(true);
final TableColumn column1 = new TableColumn(table, SWT.LEFT);
column1.setText(Resources.getMessage("ListView.1")); //$NON-NLS-1$
final TableColumn column4 = new TableColumn(table, SWT.LEFT);
column4.setText(Resources.getMessage("ListView.2")); //$NON-NLS-1$
final TableColumn column2 = new TableColumn(table, SWT.LEFT);
column2.setText(Resources.getMessage("ListView.3")); //$NON-NLS-1$
final TableColumn column3 = new TableColumn(table, SWT.LEFT);
column3.setText(Resources.getMessage("ListView.4")); //$NON-NLS-1$
table.setItemCount(0);
column1.pack();
column2.pack();
column3.pack();
column4.pack();
}
@Override
public void dispose() {
controller.removeListener(this);
}
/**
* Resets the view
*/
@Override
public void reset() {
table.setRedraw(false);
for (final TableItem i : table.getItems()) {
i.dispose();
}
list.clear();
table.setRedraw(true);
if (listener != null) {
table.removeListener(SWT.SetData, listener);
}
SWTUtil.disable(table);
}
@Override
public void update(final ModelEvent event) {
if (event.part == ModelPart.RESULT) {
if (model.getResult() == null) reset();
} else if (event.part == ModelPart.SELECTED_NODE) {
// selectedNode = (ARXNode) event.data;
} else if (event.part == ModelPart.MODEL) {
reset();
model = (Model) event.data;
update(model.getResult(), model.getNodeFilter());
} else if (event.part == ModelPart.FILTER) {
if (model != null) {
update(model.getResult(), (ModelNodeFilter) event.data);
}
}
}
/**
* Converts an information loss into a relative value in percent
*
* @param infoLoss
* @return
*/
private double asRelativeValue(final InformationLoss<?> infoLoss) {
if (model != null && model.getResult() != null && model.getResult().getLattice() != null &&
model.getResult().getLattice().getBottom() != null && model.getResult().getLattice().getTop() != null) {
return infoLoss.relativeTo(model.getResult().getLattice().getMinimumInformationLoss(),
model.getResult().getLattice().getMaximumInformationLoss()) * 100d;
} else {
return 0;
}
}
/**
* Creates an item in the list
* @param item
* @param index
*/
private void createItem(final TableItem item, final int index) {
final ARXNode node = list.get(index);
final String transformation = Arrays.toString(node.getTransformation());
item.setText(0, transformation);
final String anonymity = node.getAnonymity().toString();
item.setText(1, anonymity);
String min = null;
if (node.getMinimumInformationLoss() != null) {
min = node.getMinimumInformationLoss().toString() +
" [" + format.format(asRelativeValue(node.getMinimumInformationLoss())) + "%]"; //$NON-NLS-1$ //$NON-NLS-2$
} else {
min = Resources.getMessage("ListView.7"); //$NON-NLS-1$
}
item.setText(2, min);
String max = null;
if (node.getMaximumInformationLoss() != null) {
max = node.getMaximumInformationLoss().toString() +
" [" + format.format(asRelativeValue(node.getMaximumInformationLoss())) + "%]"; //$NON-NLS-1$ //$NON-NLS-2$
} else {
max = Resources.getMessage("ListView.10"); //$NON-NLS-1$
}
item.setText(3, max);
}
/**
* Updates the list
* @param result
* @param filter
*/
private void update(final ARXResult result, final ModelNodeFilter filter) {
if (result == null || result.getLattice() == null) return;
if (filter == null) return;
controller.getResources().getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
table.setRedraw(false);
SWTUtil.enable(table);
for (final TableItem i : table.getItems()) {
i.dispose();
}
list.clear();
final ARXLattice l = result.getLattice();
for (final ARXNode[] level : l.getLevels()) {
for (final ARXNode node : level) {
if (filter.isAllowed(result.getLattice(), node)) {
list.add(node);
}
}
}
Collections.sort(list, new Comparator<ARXNode>() {
@Override
public int compare(final ARXNode arg0,
final ARXNode arg1) {
return arg0.getMaximumInformationLoss()
.compareTo(arg1.getMaximumInformationLoss());
}
});
// Check
if (list.size() > model.getMaxNodesInViewer()) {
list.clear();
}
if (listener != null) {
table.removeListener(SWT.SetData, listener);
}
listener = new Listener() {
@Override
public void handleEvent(final Event event) {
final TableItem item = (TableItem) event.item;
final int index = table.indexOf(item);
createItem(item, index);
}
};
table.addListener(SWT.SetData, listener);
table.setItemCount(list.size());
TableColumn[] columns = table.getColumns();
for (TableColumn tableColumn : columns) {
tableColumn.setWidth(120);
}
table.setRedraw(true);
}
});
}
}
| src/gui/org/deidentifier/arx/gui/view/impl/explore/ViewList.java | /*
* ARX: Powerful Data Anonymization
* Copyright (C) 2012 - 2014 Florian Kohlmayer, Fabian Prasser
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.deidentifier.arx.gui.view.impl.explore;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.deidentifier.arx.ARXLattice;
import org.deidentifier.arx.ARXLattice.ARXNode;
import org.deidentifier.arx.ARXResult;
import org.deidentifier.arx.gui.Controller;
import org.deidentifier.arx.gui.model.Model;
import org.deidentifier.arx.gui.model.ModelEvent;
import org.deidentifier.arx.gui.model.ModelEvent.ModelPart;
import org.deidentifier.arx.gui.model.ModelNodeFilter;
import org.deidentifier.arx.gui.resources.Resources;
import org.deidentifier.arx.gui.view.SWTUtil;
import org.deidentifier.arx.gui.view.def.IView;
import org.deidentifier.arx.metric.InformationLoss;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import cern.colt.Arrays;
/**
* This class implements a list view on selected nodes.
* TODO: Highlight optimum and currently selected node in list
*
* @author prasser
*/
public class ViewList implements IView {
/** The controller */
private final Controller controller;
/** The format */
private final NumberFormat format = new DecimalFormat("##0.000"); //$NON-NLS-1$
/** The table */
private final Table table;
/** The model */
private Model model;
/** The list */
private final List<ARXNode> list = new ArrayList<ARXNode>();
/** The listener */
private Listener listener;
/**
* Init
*
* @param parent
* @param controller
*/
public ViewList(final Composite parent, final Controller controller) {
// Listen
controller.addListener(ModelPart.SELECTED_NODE, this);
controller.addListener(ModelPart.FILTER, this);
controller.addListener(ModelPart.MODEL, this);
controller.addListener(ModelPart.RESULT, this);
this.controller = controller;
table = new Table(parent, SWT.SINGLE | SWT.VIRTUAL | SWT.BORDER | SWT.V_SCROLL);
table.setLayoutData(SWTUtil.createFillGridData());
table.setHeaderVisible(true);
final TableColumn column1 = new TableColumn(table, SWT.LEFT);
column1.setText(Resources.getMessage("ListView.1")); //$NON-NLS-1$
final TableColumn column4 = new TableColumn(table, SWT.LEFT);
column4.setText(Resources.getMessage("ListView.2")); //$NON-NLS-1$
final TableColumn column2 = new TableColumn(table, SWT.LEFT);
column2.setText(Resources.getMessage("ListView.3")); //$NON-NLS-1$
final TableColumn column3 = new TableColumn(table, SWT.LEFT);
column3.setText(Resources.getMessage("ListView.4")); //$NON-NLS-1$
table.setItemCount(0);
column1.pack();
column2.pack();
column3.pack();
column4.pack();
}
@Override
public void dispose() {
controller.removeListener(this);
}
/**
* Resets the view
*/
@Override
public void reset() {
table.setRedraw(false);
for (final TableItem i : table.getItems()) {
i.dispose();
}
list.clear();
table.setRedraw(true);
if (listener != null) {
table.removeListener(SWT.SetData, listener);
}
SWTUtil.disable(table);
}
@Override
public void update(final ModelEvent event) {
if (event.part == ModelPart.RESULT) {
if (model.getResult() == null) reset();
} else if (event.part == ModelPart.SELECTED_NODE) {
// selectedNode = (ARXNode) event.data;
} else if (event.part == ModelPart.MODEL) {
reset();
model = (Model) event.data;
update(model.getResult(), model.getNodeFilter());
} else if (event.part == ModelPart.FILTER) {
if (model != null) {
update(model.getResult(), (ModelNodeFilter) event.data);
}
}
}
/**
* Converts an information loss into a relative value in percent
*
* @param infoLoss
* @return
*/
private double asRelativeValue(final InformationLoss<?> infoLoss) {
if (model != null && model.getResult() != null && model.getResult().getLattice() != null &&
model.getResult().getLattice().getBottom() != null && model.getResult().getLattice().getTop() != null) {
return infoLoss.relativeTo(model.getResult().getLattice().getMinimumInformationLoss(),
model.getResult().getLattice().getMaximumInformationLoss()) * 100d;
} else {
return 0;
}
}
/**
* Creates an item in the list
* @param item
* @param index
*/
private void createItem(final TableItem item, final int index) {
final ARXNode node = list.get(index);
final String transformation = Arrays.toString(node.getTransformation());
item.setText(0, transformation);
final String anonymity = node.getAnonymity().toString();
item.setText(1, anonymity);
String min = null;
if (node.getMinimumInformationLoss() != null) {
min = node.getMinimumInformationLoss().toString() +
" [" + format.format(asRelativeValue(node.getMinimumInformationLoss())) + "%]"; //$NON-NLS-1$ //$NON-NLS-2$
} else {
min = Resources.getMessage("ListView.7"); //$NON-NLS-1$
}
item.setText(2, min);
String max = null;
if (node.getMaximumInformationLoss() != null) {
max = node.getMaximumInformationLoss().toString() +
" [" + format.format(asRelativeValue(node.getMaximumInformationLoss())) + "%]"; //$NON-NLS-1$ //$NON-NLS-2$
} else {
max = Resources.getMessage("ListView.10"); //$NON-NLS-1$
}
item.setText(3, max);
}
/**
* Updates the list
* @param result
* @param filter
*/
private void update(final ARXResult result, final ModelNodeFilter filter) {
if (result == null || result.getLattice() == null) return;
if (filter == null) return;
controller.getResources().getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
table.setRedraw(false);
SWTUtil.enable(table);
for (final TableItem i : table.getItems()) {
i.dispose();
}
list.clear();
final ARXLattice l = result.getLattice();
for (final ARXNode[] level : l.getLevels()) {
for (final ARXNode node : level) {
if (filter.isAllowed(result.getLattice(), node)) {
list.add(node);
}
}
}
Collections.sort(list, new Comparator<ARXNode>() {
@Override
public int compare(final ARXNode arg0,
final ARXNode arg1) {
return arg0.getMaximumInformationLoss()
.compareTo(arg1.getMaximumInformationLoss());
}
});
// Check
if (list.size() > model.getMaxNodesInViewer()) {
list.clear();
}
if (listener != null) {
table.removeListener(SWT.SetData, listener);
}
listener = new Listener() {
@Override
public void handleEvent(final Event event) {
final TableItem item = (TableItem) event.item;
final int index = table.indexOf(item);
createItem(item, index);
}
};
table.addListener(SWT.SetData, listener);
table.setItemCount(list.size());
TableColumn[] columns = table.getColumns();
for (TableColumn tableColumn : columns) {
tableColumn.setWidth(120);
}
table.setRedraw(true);
}
});
}
}
| Select whole line in table in explore view | src/gui/org/deidentifier/arx/gui/view/impl/explore/ViewList.java | Select whole line in table in explore view | <ide><path>rc/gui/org/deidentifier/arx/gui/view/impl/explore/ViewList.java
<ide>
<ide> this.controller = controller;
<ide>
<del> table = new Table(parent, SWT.SINGLE | SWT.VIRTUAL | SWT.BORDER | SWT.V_SCROLL);
<add> table = new Table(parent, SWT.SINGLE | SWT.VIRTUAL | SWT.BORDER | SWT.V_SCROLL | SWT.FULL_SELECTION);
<ide> table.setLayoutData(SWTUtil.createFillGridData());
<ide> table.setHeaderVisible(true);
<ide> |
|
Java | bsd-3-clause | d8cf94be50b7a03d373ba00f2fb5512d95935fd3 | 0 | rutgersmobile/android-client,rutgersmobile/android-client | package edu.rutgers.css.Rutgers.api;
import com.androidquery.AQuery;
import com.androidquery.callback.AjaxCallback;
import com.androidquery.callback.AjaxStatus;
import com.androidquery.util.XmlDom;
import org.jdeferred.Promise;
import org.jdeferred.impl.DeferredObject;
import org.json.JSONArray;
import org.json.JSONObject;
import edu.rutgers.css.Rutgers.RutgersApplication;
import edu.rutgers.css.Rutgers.utils.AppUtil;
/** Convenience class for making requests */
public class Request {
private static final String TAG = "Request";
private static AQuery sAq;
public static long CACHE_NEVER = -1; // -1 means always refresh -- never use cache
public static long CACHE_ONE_MINUTE = 1000 * 60;
public static long CACHE_ONE_HOUR = CACHE_ONE_MINUTE * 60;
public static long CACHE_ONE_DAY = CACHE_ONE_HOUR * 24;
private static void setup () {
if (sAq == null) {
sAq = new AQuery(RutgersApplication.getAppContext());
}
}
/**
* Get JSON from mobile server.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for a JSONObject
*/
public static Promise<JSONObject, AjaxStatus, Double> api (String resource, long expire) {
return json(AppUtil.API_BASE + resource, expire);
}
/**
* Get JSON from mobile server, synchronously (blocking).
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return AjaxCallback for JSONObject
*/
public static AjaxCallback<JSONObject> apiSynchronous(String resource, long expire) {
return jsonSynchronous(AppUtil.API_BASE + resource, expire);
}
/**
* Get arbitrary JSON.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for JSONObject
*/
public static Promise<JSONObject, AjaxStatus, Double> json (String resource, long expire) {
setup();
final DeferredObject<JSONObject, AjaxStatus, Double> deferred = new DeferredObject<JSONObject, AjaxStatus, Double>();
sAq.ajax(resource, JSONObject.class, expire, new AjaxCallback<JSONObject>() {
@Override
public void callback(String url, JSONObject json, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if (json == null) {
status.invalidate();
deferred.reject(status);
} else deferred.resolve(json);
}
});
return deferred.promise();
}
/**
* Get arbitrary JSON synchronously (blocking).
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return AjaxCallback for JSONObject
*/
public static AjaxCallback<JSONObject> jsonSynchronous(String resource, long expire) {
setup();
AjaxCallback<JSONObject> callback = new AjaxCallback<JSONObject>();
callback.url(resource).expire(expire).type(JSONObject.class);
sAq.sync(callback);
// Don't cache if we didn't get a valid object
if(callback.getStatus().getCode() == AjaxStatus.TRANSFORM_ERROR) {
callback.getStatus().invalidate();
}
return callback;
}
/**
* Gets arbitrary JSON array.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for JSONArray
*/
public static Promise<JSONArray, AjaxStatus, Double> jsonArray (String resource, long expire) {
setup();
final DeferredObject<JSONArray, AjaxStatus, Double> deferred = new DeferredObject<JSONArray, AjaxStatus, Double>();
sAq.ajax(resource, JSONArray.class, expire, new AjaxCallback<JSONArray>() {
@Override
public void callback(String url, JSONArray jsonArray, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if (jsonArray == null) {
status.invalidate();
deferred.reject(status);
} else deferred.resolve(jsonArray);
}
});
return deferred.promise();
}
/**
* Get arbitrary XML.
* @param resource XML file URL
* @param expire Cache time in milliseconds
* @return Promise for XmlDom
*/
public static Promise<XmlDom, AjaxStatus, Double> xml (String resource, long expire) {
setup();
final DeferredObject<XmlDom, AjaxStatus, Double> deferred = new DeferredObject<XmlDom, AjaxStatus, Double>();
sAq.ajax(resource, XmlDom.class, expire, new AjaxCallback<XmlDom>() {
@Override
public void callback(String url, XmlDom xml, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if (xml == null) {
status.invalidate();
deferred.reject(status);
} else deferred.resolve(xml);
}
});
return deferred.promise();
}
}
| app/src/main/java/edu/rutgers/css/Rutgers/api/Request.java | package edu.rutgers.css.Rutgers.api;
import com.androidquery.AQuery;
import com.androidquery.callback.AjaxCallback;
import com.androidquery.callback.AjaxStatus;
import com.androidquery.util.XmlDom;
import org.jdeferred.Promise;
import org.jdeferred.impl.DeferredObject;
import org.json.JSONArray;
import org.json.JSONObject;
import edu.rutgers.css.Rutgers.RutgersApplication;
import edu.rutgers.css.Rutgers.utils.AppUtil;
// Convenience class for making requests
public class Request {
private static final String TAG = "Request";
private static AQuery aq;
private static boolean mSetupDone = false;
public static long CACHE_NEVER = -1; // -1 means always refresh -- never use cache
public static long CACHE_ONE_MINUTE = 1000 * 60;
public static long CACHE_ONE_HOUR = CACHE_ONE_MINUTE * 60;
public static long CACHE_ONE_DAY = CACHE_ONE_HOUR * 24;
private static void setup () {
if (!mSetupDone) {
aq = new AQuery(RutgersApplication.getAppContext());
mSetupDone = true;
}
}
/**
* Get JSON from mobile server.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for a JSONObject
*/
public static Promise<JSONObject, AjaxStatus, Double> api (String resource, long expire) {
return json(AppUtil.API_BASE + resource, expire);
}
/**
* Get full AJAX callback object for JSON from mobile server.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for an AJAX callback object
*/
public static Promise<AjaxCallback<JSONObject>, AjaxStatus, Double> apiWithStatus(String resource, long expire) {
return jsonWithStatus(AppUtil.API_BASE + resource, expire);
}
/**
* Get JSON from mobile server, synchronously (blocking).
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return AjaxCallback for JSONObject
*/
public static AjaxCallback<JSONObject> apiSynchronous(String resource, long expire) {
return jsonSynchronous(AppUtil.API_BASE + resource, expire);
}
/**
* Get arbitrary JSON.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for JSONObject
*/
public static Promise<JSONObject, AjaxStatus, Double> json (String resource, long expire) {
setup();
final DeferredObject<JSONObject, AjaxStatus, Double> deferred = new DeferredObject<JSONObject, AjaxStatus, Double>();
aq.ajax(resource, JSONObject.class, expire, new AjaxCallback<JSONObject>() {
@Override
public void callback(String url, JSONObject json, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if(json == null) {
status.invalidate();
deferred.reject(status);
}
else deferred.resolve(json);
}
});
return deferred.promise();
}
/**
* Get arbitrary JSON, in full AJAX callback.
* @param resource
* @param expire
* @return
*/
public static Promise<AjaxCallback<JSONObject>, AjaxStatus, Double> jsonWithStatus(String resource, long expire) {
setup();
final DeferredObject<AjaxCallback<JSONObject>, AjaxStatus, Double> deferred = new DeferredObject<AjaxCallback<JSONObject>, AjaxStatus, Double>();
aq.ajax(resource, JSONObject.class, expire, new AjaxCallback<JSONObject>() {
@Override
public void callback(String url, JSONObject json, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if(json == null) {
status.invalidate();
deferred.reject(status);
}
else deferred.resolve(this);
}
});
return deferred.promise();
}
/**
* Get arbitrary JSON synchronously (blocking).
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return AjaxCallback for JSONObject
*/
public static AjaxCallback<JSONObject> jsonSynchronous(String resource, long expire) {
setup();
AjaxCallback<JSONObject> callback = new AjaxCallback<JSONObject>();
callback.url(resource).expire(expire).type(JSONObject.class);
aq.sync(callback);
// Don't cache if we didn't get a valid object
if(callback.getStatus().getCode() == AjaxStatus.TRANSFORM_ERROR) {
callback.getStatus().invalidate();
}
return callback;
}
/**
* Gets arbitrary JSON array.
* @param resource JSON file URL
* @param expire Cache time in milliseconds
* @return Promise for JSONArray
*/
public static Promise<JSONArray, AjaxStatus, Double> jsonArray (String resource, long expire) {
setup();
final DeferredObject<JSONArray, AjaxStatus, Double> deferred = new DeferredObject<JSONArray, AjaxStatus, Double>();
aq.ajax(resource, JSONArray.class, expire, new AjaxCallback<JSONArray>() {
@Override
public void callback(String url, JSONArray jsonArray, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if(jsonArray == null) {
status.invalidate();
deferred.reject(status);
}
else deferred.resolve(jsonArray);
}
});
return deferred.promise();
}
/**
* Get arbitrary XML.
* @param resource XML file URL
* @param expire Cache time in milliseconds
* @return Promise for XmlDom
*/
public static Promise<XmlDom, AjaxStatus, Double> xml (String resource, long expire) {
setup();
final DeferredObject<XmlDom, AjaxStatus, Double> deferred = new DeferredObject<XmlDom, AjaxStatus, Double>();
aq.ajax(resource, XmlDom.class, expire, new AjaxCallback<XmlDom>() {
@Override
public void callback(String url, XmlDom xml, AjaxStatus status) {
// Don't cache if we didn't get a valid object
if(xml == null) {
status.invalidate();
deferred.reject(status);
}
else deferred.resolve(xml);
}
});
return deferred.promise();
}
}
| Request API: removing unused methods
| app/src/main/java/edu/rutgers/css/Rutgers/api/Request.java | Request API: removing unused methods | <ide><path>pp/src/main/java/edu/rutgers/css/Rutgers/api/Request.java
<ide> import edu.rutgers.css.Rutgers.RutgersApplication;
<ide> import edu.rutgers.css.Rutgers.utils.AppUtil;
<ide>
<del>// Convenience class for making requests
<add>/** Convenience class for making requests */
<ide> public class Request {
<ide>
<ide> private static final String TAG = "Request";
<del> private static AQuery aq;
<del> private static boolean mSetupDone = false;
<add>
<add> private static AQuery sAq;
<ide>
<ide> public static long CACHE_NEVER = -1; // -1 means always refresh -- never use cache
<ide> public static long CACHE_ONE_MINUTE = 1000 * 60;
<ide> public static long CACHE_ONE_DAY = CACHE_ONE_HOUR * 24;
<ide>
<ide> private static void setup () {
<del> if (!mSetupDone) {
<del> aq = new AQuery(RutgersApplication.getAppContext());
<del>
<del> mSetupDone = true;
<add> if (sAq == null) {
<add> sAq = new AQuery(RutgersApplication.getAppContext());
<ide> }
<ide> }
<ide>
<ide> */
<ide> public static Promise<JSONObject, AjaxStatus, Double> api (String resource, long expire) {
<ide> return json(AppUtil.API_BASE + resource, expire);
<del> }
<del>
<del> /**
<del> * Get full AJAX callback object for JSON from mobile server.
<del> * @param resource JSON file URL
<del> * @param expire Cache time in milliseconds
<del> * @return Promise for an AJAX callback object
<del> */
<del> public static Promise<AjaxCallback<JSONObject>, AjaxStatus, Double> apiWithStatus(String resource, long expire) {
<del> return jsonWithStatus(AppUtil.API_BASE + resource, expire);
<ide> }
<ide>
<ide> /**
<ide> setup();
<ide> final DeferredObject<JSONObject, AjaxStatus, Double> deferred = new DeferredObject<JSONObject, AjaxStatus, Double>();
<ide>
<del> aq.ajax(resource, JSONObject.class, expire, new AjaxCallback<JSONObject>() {
<add> sAq.ajax(resource, JSONObject.class, expire, new AjaxCallback<JSONObject>() {
<ide>
<ide> @Override
<ide> public void callback(String url, JSONObject json, AjaxStatus status) {
<ide> // Don't cache if we didn't get a valid object
<del> if(json == null) {
<add> if (json == null) {
<ide> status.invalidate();
<ide> deferred.reject(status);
<del> }
<del> else deferred.resolve(json);
<add> } else deferred.resolve(json);
<ide> }
<del>
<add>
<ide> });
<ide>
<del> return deferred.promise();
<del> }
<del>
<del> /**
<del> * Get arbitrary JSON, in full AJAX callback.
<del> * @param resource
<del> * @param expire
<del> * @return
<del> */
<del> public static Promise<AjaxCallback<JSONObject>, AjaxStatus, Double> jsonWithStatus(String resource, long expire) {
<del> setup();
<del> final DeferredObject<AjaxCallback<JSONObject>, AjaxStatus, Double> deferred = new DeferredObject<AjaxCallback<JSONObject>, AjaxStatus, Double>();
<del>
<del> aq.ajax(resource, JSONObject.class, expire, new AjaxCallback<JSONObject>() {
<del> @Override
<del> public void callback(String url, JSONObject json, AjaxStatus status) {
<del> // Don't cache if we didn't get a valid object
<del> if(json == null) {
<del> status.invalidate();
<del> deferred.reject(status);
<del> }
<del> else deferred.resolve(this);
<del> }
<del> });
<del>
<ide> return deferred.promise();
<ide> }
<ide>
<ide> setup();
<ide> AjaxCallback<JSONObject> callback = new AjaxCallback<JSONObject>();
<ide> callback.url(resource).expire(expire).type(JSONObject.class);
<del> aq.sync(callback);
<add> sAq.sync(callback);
<ide> // Don't cache if we didn't get a valid object
<ide> if(callback.getStatus().getCode() == AjaxStatus.TRANSFORM_ERROR) {
<ide> callback.getStatus().invalidate();
<ide> setup();
<ide> final DeferredObject<JSONArray, AjaxStatus, Double> deferred = new DeferredObject<JSONArray, AjaxStatus, Double>();
<ide>
<del> aq.ajax(resource, JSONArray.class, expire, new AjaxCallback<JSONArray>() {
<add> sAq.ajax(resource, JSONArray.class, expire, new AjaxCallback<JSONArray>() {
<ide>
<ide> @Override
<ide> public void callback(String url, JSONArray jsonArray, AjaxStatus status) {
<ide> // Don't cache if we didn't get a valid object
<del> if(jsonArray == null) {
<add> if (jsonArray == null) {
<ide> status.invalidate();
<ide> deferred.reject(status);
<del> }
<del> else deferred.resolve(jsonArray);
<add> } else deferred.resolve(jsonArray);
<ide> }
<del>
<add>
<ide> });
<ide>
<ide> return deferred.promise();
<ide> setup();
<ide> final DeferredObject<XmlDom, AjaxStatus, Double> deferred = new DeferredObject<XmlDom, AjaxStatus, Double>();
<ide>
<del> aq.ajax(resource, XmlDom.class, expire, new AjaxCallback<XmlDom>() {
<add> sAq.ajax(resource, XmlDom.class, expire, new AjaxCallback<XmlDom>() {
<ide>
<ide> @Override
<ide> public void callback(String url, XmlDom xml, AjaxStatus status) {
<ide> // Don't cache if we didn't get a valid object
<del> if(xml == null) {
<add> if (xml == null) {
<ide> status.invalidate();
<ide> deferred.reject(status);
<del> }
<del> else deferred.resolve(xml);
<add> } else deferred.resolve(xml);
<ide> }
<del>
<add>
<ide> });
<ide>
<ide> return deferred.promise(); |
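
/*
 * Editor's sketch (not part of the original commit): one way a caller might
 * consume the Promise<JSONObject, AjaxStatus, Double> returned by the api()/
 * json() helpers in the hunk above. Only the jDeferred and AQuery types that
 * are visible in the diff are assumed; the surrounding class name and the
 * console output are purely illustrative.
 */
import org.jdeferred.DoneCallback;
import org.jdeferred.FailCallback;
import org.jdeferred.Promise;
import org.json.JSONObject;

import com.androidquery.callback.AjaxStatus;

public class PromiseUsageSketch {

    static void consume(Promise<JSONObject, AjaxStatus, Double> promise) {
        promise.done(new DoneCallback<JSONObject>() {
            public void onDone(JSONObject json) {
                // resolved with the parsed JSON object
                System.out.println("got " + json.length() + " top-level keys");
            }
        }).fail(new FailCallback<AjaxStatus>() {
            public void onFail(AjaxStatus status) {
                // rejected with the invalidated AjaxStatus (the json == null branch above)
                System.err.println("request failed, HTTP code " + status.getCode());
            }
        });
    }
}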
|
Java | apache-2.0 | 0d733557e9114111ee458ccde77993e7cc0927e9 | 0 | andriell/craftyfox,andriell/craftyfox,andriell/craftyfox | package com.github.andriell.processor;
import java.util.HashSet;
/**
* Created by Vika on 05.02.2016
*/
public class RunnableAdapter implements Runnable {
private HashSet<RunnableListenerInterface> listeners;
private Runnable runnable;
public RunnableAdapter(Runnable runnable) {
this.runnable = runnable;
listeners = new HashSet<RunnableListenerInterface>();
}
public RunnableAdapter addListener(RunnableListenerInterface listener) {
listeners.add(listener);
return this;
}
public RunnableAdapter removeListener(RunnableListenerInterface listener) {
listeners.remove(listener);
return this;
}
public int sizeListener() {
return listeners.size();
}
public void run() {
for (RunnableListenerInterface listener: listeners) {
try {
listener.onStart(runnable);
} catch (Exception e) {
listener.onException(runnable, e);
}
}
try {
runnable.run();
} catch (Exception e) {
for (RunnableListenerInterface listener: listeners) {
listener.onException(runnable, e);
}
}
for (RunnableListenerInterface listener: listeners) {
try {
listener.onComplete(runnable);
} catch (Exception e) {
listener.onException(runnable, e);
}
}
}
public static RunnableAdapter envelop(Runnable runnable) {
if (runnable instanceof RunnableAdapter) {
return (RunnableAdapter) runnable;
} else {
return new RunnableAdapter(runnable);
}
}
}
| src/main/java/com/github/andriell/processor/RunnableAdapter.java | package com.github.andriell.processor;
import java.util.HashSet;
/**
* Created by Vika on 05.02.2016
*/
public class RunnableAdapter implements Runnable {
private HashSet<RunnableListenerInterface> listeners;
private Runnable runnable;
public RunnableAdapter(Runnable runnable) {
this.runnable = runnable;
listeners = new HashSet<RunnableListenerInterface>();
}
public void addListener(RunnableListenerInterface listener) {
listeners.add(listener);
}
public void removeListener(RunnableListenerInterface listener) {
listeners.remove(listener);
}
public int sizeListener() {
return listeners.size();
}
public void run() {
for (RunnableListenerInterface listener: listeners) {
try {
listener.onStart(runnable);
} catch (Exception e) {
listener.onException(runnable, e);
}
}
try {
runnable.run();
} catch (Exception e) {
for (RunnableListenerInterface listener: listeners) {
listener.onException(runnable, e);
}
}
for (RunnableListenerInterface listener: listeners) {
try {
listener.onComplete(runnable);
} catch (Exception e) {
listener.onException(runnable, e);
}
}
}
public static RunnableAdapter envelop(Runnable runnable) {
if (runnable instanceof RunnableAdapter) {
return (RunnableAdapter) runnable;
} else {
return new RunnableAdapter(runnable);
}
}
}
| start
| src/main/java/com/github/andriell/processor/RunnableAdapter.java | start | <ide><path>rc/main/java/com/github/andriell/processor/RunnableAdapter.java
<ide> listeners = new HashSet<RunnableListenerInterface>();
<ide> }
<ide>
<del> public void addListener(RunnableListenerInterface listener) {
<add> public RunnableAdapter addListener(RunnableListenerInterface listener) {
<ide> listeners.add(listener);
<add> return this;
<ide> }
<ide>
<del> public void removeListener(RunnableListenerInterface listener) {
<add> public RunnableAdapter removeListener(RunnableListenerInterface listener) {
<ide> listeners.remove(listener);
<add> return this;
<ide> }
<ide>
<ide> public int sizeListener() { |
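
/*
 * Editor's sketch (not from the repository): intended use of the fluent
 * addListener(...) this commit introduces. The RunnableListenerInterface
 * callbacks (onStart/onComplete/onException) are inferred from the call sites
 * in run() above; the logging listener and the task are hypothetical.
 */
package com.github.andriell.processor;

public class RunnableAdapterUsageSketch {

    public static void main(String[] args) {
        Runnable task = new Runnable() {
            public void run() {
                System.out.println("doing the work");
            }
        };

        RunnableListenerInterface logging = new RunnableListenerInterface() {
            public void onStart(Runnable r) { System.out.println("started " + r); }
            public void onComplete(Runnable r) { System.out.println("completed " + r); }
            public void onException(Runnable r, Exception e) { e.printStackTrace(); }
        };

        // envelop() reuses an existing adapter or wraps a plain Runnable; the new
        // fluent return value of addListener() lets registration and run() chain.
        RunnableAdapter.envelop(task).addListener(logging).run();
    }
}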
|
Java | apache-2.0 | 083169bedb6b71cc306d6bb5ed316250713ece70 | 0 | adligo/i_adi.adligo.org | package org.adligo.i.adi.client;
import org.adligo.i.adi.client.I_Invoker;
import com.google.gwt.user.client.rpc.AsyncCallback;
/**
 * This is a single-threaded way to make an async call synchronous.
 * It is useful for developers who do NOT want to rewrite this all the time,
 * because everything in adi was done with the async API to allow for GWT
 * and other optimized remote calls.
*
* @author scott
*
*/
public class AsyncToSyncAdaptor implements AsyncCallback, Runnable {
private Object result;
private Throwable caught;
private Object arg;
private I_Invoker action;
public void onFailure(Throwable caught) {
this.caught = caught;
}
public void onSuccess(Object arg0) {
this.result = arg0;
}
public Object getResult() {
return result;
}
public Throwable getCaught() {
return caught;
}
public Object getArg() {
return arg;
}
public void setArg(Object arg) {
this.arg = arg;
}
public I_Invoker getAction() {
return action;
}
public void setAction(I_Invoker action) {
this.action = action;
}
public void run() {
if (action == null) {
caught = new NullPointerException(getClass().getName() +
" must have a action to run!");
return;
}
action.invoke(arg, this);
}
public void dispose() {
result = null;
caught = null;
arg = null;
action = null;
}
/**
* this will make a synch call,
*
* DO NOT USE ON A GWT CLIENT UNLESS YOU HAVE A VERY SPECIAL REASON,
* IT WILL BE VERY SLOW
*
* ONLY use when you know the action (I_Invoker) isLocal()
* and should process in the Current Thread,
* otherwise use the Async API
*
* @param action
* @param arg
* @return
* @throws Throwable
*/
public static final Object run(I_Invoker action, Object arg) throws Throwable {
AsyncToSyncAdaptor adaptor = new AsyncToSyncAdaptor();
adaptor.setAction(action);
adaptor.setArg(arg);
adaptor.run();
Throwable t = adaptor.getCaught();
if (t != null) {
// Some Exception so throw it
adaptor.dispose();
throw t;
}
return adaptor.getResult();
}
}
| src/org/adligo/i/adi/client/AsyncToSyncAdaptor.java | package org.adligo.i.adi.client;
import org.adligo.i.adi.client.I_Invoker;
import com.google.gwt.user.client.rpc.AsyncCallback;
/**
 * This is a single-threaded way to make an async call synchronous.
 * It is useful for developers who do NOT want to rewrite this all the time,
 * because everything in adi was done with the async API to allow for GWT
 * and other optimized remote calls.
*
* @author scott
*
*/
public class AsyncToSyncAdaptor implements AsyncCallback, Runnable {
private Object result;
private Throwable caught;
private Object arg;
private I_Invoker action;
public void onFailure(Throwable caught) {
this.caught = caught;
}
public void onSuccess(Object arg0) {
this.result = arg0;
}
public Object getResult() {
return result;
}
public Throwable getCaught() {
return caught;
}
public Object getArg() {
return arg;
}
public void setArg(Object arg) {
this.arg = arg;
}
public I_Invoker getAction() {
return action;
}
public void setAction(I_Invoker action) {
this.action = action;
}
public void run() {
if (action == null) {
caught = new NullPointerException(getClass().getName() +
" must have a action to run!");
return;
}
action.invoke(arg, this);
}
public void dispose() {
result = null;
caught = null;
arg = null;
action = null;
}
/**
* this will make a synch call,
*
* DO NOT USE ON A GWT CLIENT UNLESS YOU HAVE A VERY SPECIAL REASON,
* IT WILL BE VERY SLOW
*
* ONLY use when you know the action (I_Invoker) isLocal()
* and should process in the Current Thread,
* otherwise use the Async API
*
* @param action
* @param arg
* @return
* @throws Throwable
*/
public static final Object run(I_Invoker action, Object arg) throws Throwable {
AsyncToSyncAdaptor adaptor = new AsyncToSyncAdaptor();
adaptor.setAction(action);
adaptor.setArg(arg);
adaptor.run();
Throwable t = adaptor.getCaught();
if (t != null) {
// Some Security Exception, just log
// and return nothing
adaptor.dispose();
throw t;
}
return adaptor.getResult();
}
}
| *** empty log message ***
| src/org/adligo/i/adi/client/AsyncToSyncAdaptor.java | *** empty log message *** | <ide><path>rc/org/adligo/i/adi/client/AsyncToSyncAdaptor.java
<ide>
<ide> Throwable t = adaptor.getCaught();
<ide> if (t != null) {
<del> // Some Security Exception, just log
<del> // and return nothing
<add> // Some Exception so throw it
<ide> adaptor.dispose();
<ide> throw t;
<ide> } |
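
/*
 * Editor's sketch (not part of the commit): calling the static run(...) helper
 * with a local invoker, per the warning in its javadoc. I_Invoker is assumed to
 * declare invoke(Object, AsyncCallback) as used by the adapter; if the real
 * interface also declares isLocal() or other methods, the anonymous stub below
 * would need to implement those too.
 */
package org.adligo.i.adi.client;

import com.google.gwt.user.client.rpc.AsyncCallback;

public class AsyncToSyncUsageSketch {

    public static void main(String[] args) throws Throwable {
        I_Invoker localInvoker = new I_Invoker() {
            public void invoke(Object arg, AsyncCallback callback) {
                // purely local work, so calling back before invoke() returns is safe
                callback.onSuccess("echo: " + arg);
            }
        };

        // run() drives the invoker in the current thread; a local invoker calls
        // back synchronously, so the result (or the rethrown Throwable) is ready
        // as soon as run() returns.
        Object result = AsyncToSyncAdaptor.run(localInvoker, "hello");
        System.out.println(result);
    }
}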
|
Java | apache-2.0 | b625d6cd30cc1026e123fd3498f94b23db74a3f4 | 0 | raphw/byte-buddy,PascalSchumacher/byte-buddy,CodingFabian/byte-buddy,vic/byte-buddy,raphw/byte-buddy,RobAustin/byte-buddy,raphw/byte-buddy,mches/byte-buddy,DALDEI/byte-buddy | package net.bytebuddy.utility;
import net.bytebuddy.instrumentation.type.TypeDescription;
import net.bytebuddy.instrumentation.type.TypeList;
import net.bytebuddy.test.utility.JavaVersionRule;
import net.bytebuddy.test.utility.ObjectPropertyAssertion;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.MethodRule;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
public class JavaInstanceMethodTypeTest {
@Rule
public MethodRule javaVersionRule = new JavaVersionRule();
private static final String BAR = "bar", QUX = "qux";
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfLoadedType() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(void.class, Foo.class);
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Foo.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfMethod() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(Foo.class.getDeclaredMethod(BAR, Void.class));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfStaticMethod() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(Foo.class.getDeclaredMethod(QUX, Void.class));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfConstructor() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(Foo.class.getDeclaredConstructor(Void.class));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfGetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofGetter(Foo.class.getDeclaredField(BAR));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(Void.class)));
assertThat(methodType.getParameterTypes(), is(Collections.<TypeDescription>emptyList()));
}
@Test
public void testMethodTypeOfStaticGetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofGetter(Foo.class.getDeclaredField(QUX));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(Void.class)));
assertThat(methodType.getParameterTypes(), is(Collections.<TypeDescription>emptyList()));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfSetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofSetter(Foo.class.getDeclaredField(BAR));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfStaticSetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofSetter(Foo.class.getDeclaredField(QUX));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
public void testMethodTypeOfConstant() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofConstant(new Foo(null));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(Foo.class)));
assertThat(methodType.getParameterTypes(), is(Collections.<TypeDescription>emptyList()));
}
@Test
@SuppressWarnings("unchecked")
@JavaVersionRule.Enforce(7)
public void testMethodTypeOfLoadedMethodType() throws Exception {
Object loadedMethodType = JavaType.METHOD_TYPE.load().getDeclaredMethod("methodType", Class.class, Class[].class)
.invoke(null, void.class, new Class<?>[]{Object.class});
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(loadedMethodType);
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Object.class)));
}
@Test
public void testObjectProperties() throws Exception {
ObjectPropertyAssertion.of(JavaInstance.MethodType.class).apply();
}
public static class Foo {
Void bar;
static Void qux;
Foo(Void value) {
/* empty*/
}
void bar(Void value) {
/* empty */
}
static void qux(Void value) {
/* empty */
}
}
} | byte-buddy-dep/src/test/java/net/bytebuddy/utility/JavaInstanceMethodTypeTest.java | package net.bytebuddy.utility;
import net.bytebuddy.instrumentation.type.TypeDescription;
import net.bytebuddy.instrumentation.type.TypeList;
import net.bytebuddy.test.utility.ObjectPropertyAssertion;
import org.junit.Test;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
public class JavaInstanceMethodTypeTest {
private static final String BAR = "bar", QUX = "qux";
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfLoadedType() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(void.class, Foo.class);
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Foo.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfMethod() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(Foo.class.getDeclaredMethod(BAR, Void.class));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfStaticMethod() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(Foo.class.getDeclaredMethod(QUX, Void.class));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfConstructor() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(Foo.class.getDeclaredConstructor(Void.class));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfGetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofGetter(Foo.class.getDeclaredField(BAR));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(Void.class)));
assertThat(methodType.getParameterTypes(), is(Collections.<TypeDescription>emptyList()));
}
@Test
public void testMethodTypeOfStaticGetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofGetter(Foo.class.getDeclaredField(QUX));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(Void.class)));
assertThat(methodType.getParameterTypes(), is(Collections.<TypeDescription>emptyList()));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfSetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofSetter(Foo.class.getDeclaredField(BAR));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfStaticSetter() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofSetter(Foo.class.getDeclaredField(QUX));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Void.class)));
}
@Test
public void testMethodTypeOfConstant() throws Exception {
JavaInstance.MethodType methodType = JavaInstance.MethodType.ofConstant(new Foo(null));
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(Foo.class)));
assertThat(methodType.getParameterTypes(), is(Collections.<TypeDescription>emptyList()));
}
@Test
@SuppressWarnings("unchecked")
public void testMethodTypeOfLoadedMethodType() throws Exception {
Object loadedMethodType = JavaType.METHOD_TYPE.load().getDeclaredMethod("methodType", Class.class, Class[].class)
.invoke(null, void.class, new Class<?>[]{Object.class});
JavaInstance.MethodType methodType = JavaInstance.MethodType.of(loadedMethodType);
assertThat(methodType.getReturnType(), is((TypeDescription) new TypeDescription.ForLoadedType(void.class)));
assertThat(methodType.getParameterTypes(), is((List<TypeDescription>) new TypeList.ForLoadedType(Object.class)));
}
@Test
public void testObjectProperties() throws Exception {
ObjectPropertyAssertion.of(JavaInstance.MethodType.class).apply();
}
public static class Foo {
Void bar;
static Void qux;
Foo(Void value) {
/* empty*/
}
void bar(Void value) {
/* empty */
}
static void qux(Void value) {
/* empty */
}
}
} | Added version guard to method type test.
| byte-buddy-dep/src/test/java/net/bytebuddy/utility/JavaInstanceMethodTypeTest.java | Added version guard to method type test. | <ide><path>yte-buddy-dep/src/test/java/net/bytebuddy/utility/JavaInstanceMethodTypeTest.java
<ide>
<ide> import net.bytebuddy.instrumentation.type.TypeDescription;
<ide> import net.bytebuddy.instrumentation.type.TypeList;
<add>import net.bytebuddy.test.utility.JavaVersionRule;
<ide> import net.bytebuddy.test.utility.ObjectPropertyAssertion;
<add>import org.junit.Rule;
<ide> import org.junit.Test;
<add>import org.junit.rules.MethodRule;
<ide>
<ide> import java.util.Collections;
<ide> import java.util.List;
<ide> import static org.junit.Assert.assertThat;
<ide>
<ide> public class JavaInstanceMethodTypeTest {
<add>
<add> @Rule
<add> public MethodRule javaVersionRule = new JavaVersionRule();
<ide>
<ide> private static final String BAR = "bar", QUX = "qux";
<ide>
<ide>
<ide> @Test
<ide> @SuppressWarnings("unchecked")
<add> @JavaVersionRule.Enforce(7)
<ide> public void testMethodTypeOfLoadedMethodType() throws Exception {
<ide> Object loadedMethodType = JavaType.METHOD_TYPE.load().getDeclaredMethod("methodType", Class.class, Class[].class)
<ide> .invoke(null, void.class, new Class<?>[]{Object.class}); |
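
/*
 * Editor's sketch: this is NOT byte-buddy's actual JavaVersionRule, only a
 * simplified illustration of the kind of guard the commit adds, i.e. a JUnit
 * MethodRule that skips @Enforce-annotated tests when the running JVM is older
 * than the required version. All names below are invented for the illustration.
 */
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.junit.Assume;
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;

public class VersionGuardSketch implements MethodRule {

    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)
    public @interface Enforce {
        int value();
    }

    public Statement apply(final Statement base, FrameworkMethod method, Object target) {
        Enforce enforce = method.getAnnotation(Enforce.class);
        final int required = enforce == null ? 0 : enforce.value();
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                // "1.7" -> 7, "11" -> 11; crude but sufficient for the illustration
                String[] parts = System.getProperty("java.specification.version").split("\\.");
                int current = Integer.parseInt(parts[parts.length - 1]);
                Assume.assumeTrue(current >= required);
                base.evaluate();
            }
        };
    }
}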
|
Java | epl-1.0 | 7a9e6d8f65a539cc3e191cd2f8935a1b4d3400a1 | 0 | edgarmueller/emfstore-rest | /*******************************************************************************
* Copyright (c) 2008-2011 Chair for Applied Software Engineering,
* Technische Universitaet Muenchen.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* wesendon
******************************************************************************/
package org.eclipse.emf.emfstore.internal.client.model.connectionmanager.xmlrpc;
import java.util.List;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.emfstore.common.extensionpoint.ESExtensionPoint;
import org.eclipse.emf.emfstore.internal.client.model.ServerInfo;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.AbstractConnectionManager;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.ConnectionManager;
import org.eclipse.emf.emfstore.internal.common.model.EMFStoreProperty;
import org.eclipse.emf.emfstore.internal.common.model.Project;
import org.eclipse.emf.emfstore.internal.server.connection.xmlrpc.XmlRpcConnectionHandler;
import org.eclipse.emf.emfstore.internal.server.exceptions.InvalidVersionSpecException;
import org.eclipse.emf.emfstore.internal.server.filetransfer.FileChunk;
import org.eclipse.emf.emfstore.internal.server.filetransfer.FileTransferInformation;
import org.eclipse.emf.emfstore.internal.server.model.AuthenticationInformation;
import org.eclipse.emf.emfstore.internal.server.model.ClientVersionInfo;
import org.eclipse.emf.emfstore.internal.server.model.ProjectHistory;
import org.eclipse.emf.emfstore.internal.server.model.ProjectId;
import org.eclipse.emf.emfstore.internal.server.model.ProjectInfo;
import org.eclipse.emf.emfstore.internal.server.model.SessionId;
import org.eclipse.emf.emfstore.internal.server.model.accesscontrol.ACOrgUnitId;
import org.eclipse.emf.emfstore.internal.server.model.accesscontrol.ACUser;
import org.eclipse.emf.emfstore.internal.server.model.accesscontrol.OrgUnitProperty;
import org.eclipse.emf.emfstore.internal.server.model.versioning.BranchInfo;
import org.eclipse.emf.emfstore.internal.server.model.versioning.BranchVersionSpec;
import org.eclipse.emf.emfstore.internal.server.model.versioning.ChangePackage;
import org.eclipse.emf.emfstore.internal.server.model.versioning.HistoryInfo;
import org.eclipse.emf.emfstore.internal.server.model.versioning.HistoryQuery;
import org.eclipse.emf.emfstore.internal.server.model.versioning.LogMessage;
import org.eclipse.emf.emfstore.internal.server.model.versioning.PrimaryVersionSpec;
import org.eclipse.emf.emfstore.internal.server.model.versioning.TagVersionSpec;
import org.eclipse.emf.emfstore.internal.server.model.versioning.VersionSpec;
import org.eclipse.emf.emfstore.server.exceptions.ESException;
/**
* XML RPC based Implementation of ConnectionManager.
*
* @author wesendon
*/
public class XmlRpcConnectionManager extends AbstractConnectionManager<XmlRpcClientManager> implements
ConnectionManager {
private static final String CONNECTION_MANAGER = "org.eclipse.emf.emfstore.client.connectionManager"; //$NON-NLS-1$
/**
* {@inheritDoc}
*/
public AuthenticationInformation logIn(String username, String password, ServerInfo serverInfo,
ClientVersionInfo clientVersionInfo) throws ESException {
final XmlRpcClientManager clientManager = new XmlRpcClientManager(XmlRpcConnectionHandler.EMFSTORE);
clientManager.initConnection(serverInfo);
final AuthenticationInformation authenticationInformation = clientManager.callWithResult("logIn",
AuthenticationInformation.class, username, password, clientVersionInfo);
addConnectionProxy(authenticationInformation.getSessionId(), clientManager);
return authenticationInformation;
}
/**
* {@inheritDoc}
*/
public void logout(SessionId sessionId) throws ESException {
getConnectionProxy(sessionId).call("logout", sessionId);
removeConnectionProxy(sessionId);
}
/**
* {@inheritDoc}
*/
public void addTag(SessionId sessionId, ProjectId projectId, PrimaryVersionSpec versionSpec, TagVersionSpec tag)
throws ESException {
getConnectionProxy(sessionId).call("addTag", sessionId, projectId, versionSpec, tag);
}
/**
* {@inheritDoc}
*/
public ProjectInfo createEmptyProject(SessionId sessionId, String name, String description, LogMessage logMessage)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("createEmptyProject", ProjectInfo.class, sessionId, name,
description, logMessage);
}
/**
* {@inheritDoc}
*/
public ProjectInfo createProject(SessionId sessionId, String name, String description, LogMessage logMessage,
Project project) throws ESException {
return getConnectionProxy(sessionId).callWithResult("createProject", ProjectInfo.class, sessionId, name,
description, logMessage, project);
}
/**
* {@inheritDoc}
*/
public PrimaryVersionSpec createVersion(SessionId sessionId, ProjectId projectId,
PrimaryVersionSpec baseVersionSpec, ChangePackage changePackage, BranchVersionSpec targetBranch,
PrimaryVersionSpec sourceVersion, LogMessage logMessage) throws ESException, InvalidVersionSpecException {
return getConnectionProxy(sessionId).callWithResult("createVersion", PrimaryVersionSpec.class, sessionId,
projectId, baseVersionSpec, changePackage, targetBranch, sourceVersion, logMessage);
}
/**
* {@inheritDoc}
*/
public void deleteProject(SessionId sessionId, ProjectId projectId, boolean deleteFiles) throws ESException {
getConnectionProxy(sessionId).call("deleteProject", sessionId, projectId, deleteFiles);
}
/**
* {@inheritDoc}
*/
public FileChunk downloadFileChunk(SessionId sessionId, ProjectId projectId, FileTransferInformation fileInformation)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("downloadFileChunk", FileChunk.class, sessionId, projectId,
fileInformation);
}
/**
* {@inheritDoc}
*/
public ProjectHistory exportProjectHistoryFromServer(SessionId sessionId, ProjectId projectId)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("exportProjectHistoryFromServer", ProjectHistory.class,
sessionId, projectId);
}
/**
* {@inheritDoc}
*/
public List<ChangePackage> getChanges(SessionId sessionId, ProjectId projectId, VersionSpec source,
VersionSpec target)
throws InvalidVersionSpecException, ESException {
return getConnectionProxy(sessionId).callWithListResult("getChanges", ChangePackage.class, sessionId,
projectId, source, target);
}
/**
*
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.internal.server.EMFStore#getBranches(org.eclipse.emf.emfstore.internal.server.model.SessionId,
* org.eclipse.emf.emfstore.internal.server.model.ProjectId)
*/
public List<BranchInfo> getBranches(SessionId sessionId, ProjectId projectId) throws ESException {
return getConnectionProxy(sessionId).callWithListResult("getBranches", BranchInfo.class, sessionId, projectId);
}
/**
* {@inheritDoc}
*/
public List<HistoryInfo> getHistoryInfo(SessionId sessionId, ProjectId projectId, HistoryQuery<?> historyQuery)
throws ESException {
return getConnectionProxy(sessionId).callWithListResult("getHistoryInfo", HistoryInfo.class, sessionId,
projectId, historyQuery);
}
/**
* {@inheritDoc}
*/
public Project getProject(SessionId sessionId, ProjectId projectId, VersionSpec versionSpec)
throws InvalidVersionSpecException, ESException {
return getConnectionProxy(sessionId).callWithResult("getProject", Project.class, sessionId, projectId,
versionSpec);
}
/**
* {@inheritDoc}
*/
public List<ProjectInfo> getProjectList(SessionId sessionId) throws ESException {
// return getConnectionProxy(sessionId).callWithListResult("getProjectList", ProjectInfo.class, sessionId);
final ESExtensionPoint extensionPoint = new ESExtensionPoint(
CONNECTION_MANAGER, true);
final ConnectionManager connectionManager = extensionPoint.getClass("class",
ConnectionManager.class);
return connectionManager.getProjectList(sessionId);
}
/**
* {@inheritDoc}
*/
public ProjectId importProjectHistoryToServer(SessionId sessionId, ProjectHistory projectHistory)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("importProjectHistoryToServer", ProjectId.class, sessionId,
projectHistory);
}
/**
* {@inheritDoc}
*/
public void removeTag(SessionId sessionId, ProjectId projectId, PrimaryVersionSpec versionSpec, TagVersionSpec tag)
throws ESException {
getConnectionProxy(sessionId).call("removeTag", sessionId, projectId, versionSpec, tag);
}
/**
* {@inheritDoc}
*/
public ACUser resolveUser(SessionId sessionId, ACOrgUnitId id) throws ESException {
return getConnectionProxy(sessionId).callWithResult("resolveUser", ACUser.class, sessionId, id);
}
/**
* {@inheritDoc}
*/
public PrimaryVersionSpec resolveVersionSpec(SessionId sessionId, ProjectId projectId, VersionSpec versionSpec)
throws InvalidVersionSpecException, ESException {
return getConnectionProxy(sessionId).callWithResult("resolveVersionSpec", PrimaryVersionSpec.class, sessionId,
projectId, versionSpec);
}
/**
* {@inheritDoc}
*/
public void transmitProperty(SessionId sessionId, OrgUnitProperty changedProperty, ACUser tmpUser,
ProjectId projectId) throws ESException {
getConnectionProxy(sessionId).call("transmitProperty", sessionId, changedProperty, tmpUser, projectId);
}
/**
* {@inheritDoc}
*/
public FileTransferInformation uploadFileChunk(SessionId sessionId, ProjectId projectId, FileChunk fileChunk)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("uploadFileChunk", FileTransferInformation.class,
sessionId, projectId, fileChunk);
}
/**
* {@inheritDoc}
*/
public List<EMFStoreProperty> setEMFProperties(SessionId sessionId, List<EMFStoreProperty> properties,
ProjectId projectId) throws ESException {
return getConnectionProxy(sessionId).callWithListResult("setEMFProperties", EMFStoreProperty.class, sessionId,
properties, projectId);
}
/**
* {@inheritDoc}
*/
public List<EMFStoreProperty> getEMFProperties(SessionId sessionId, ProjectId projectId) throws ESException {
return getConnectionProxy(sessionId).callWithListResult("getEMFProperties", EMFStoreProperty.class, sessionId,
projectId);
}
/**
*
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.internal.client.model.connectionmanager.ConnectionManager#isLoggedIn(org.eclipse.emf.emfstore.internal.server.model.SessionId)
*/
public boolean isLoggedIn(SessionId id) {
return hasConnectionProxy(id);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.internal.server.EMFStore#registerEPackage(org.eclipse.emf.emfstore.internal.server.model.SessionId,
* org.eclipse.emf.ecore.EPackage)
*/
public void registerEPackage(SessionId sessionId, EPackage pkg) throws ESException {
getConnectionProxy(sessionId).call("registerEPackage", sessionId, pkg);
}
}
| bundles/org.eclipse.emf.emfstore.client/src/org/eclipse/emf/emfstore/internal/client/model/connectionmanager/xmlrpc/XmlRpcConnectionManager.java | /*******************************************************************************
* Copyright (c) 2008-2011 Chair for Applied Software Engineering,
* Technische Universitaet Muenchen.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* wesendon
******************************************************************************/
package org.eclipse.emf.emfstore.internal.client.model.connectionmanager.xmlrpc;
import java.util.List;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.emfstore.common.extensionpoint.ESExtensionPoint;
import org.eclipse.emf.emfstore.internal.client.model.ServerInfo;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.AbstractConnectionManager;
import org.eclipse.emf.emfstore.internal.client.model.connectionmanager.ConnectionManager;
import org.eclipse.emf.emfstore.internal.common.model.EMFStoreProperty;
import org.eclipse.emf.emfstore.internal.common.model.Project;
import org.eclipse.emf.emfstore.internal.server.connection.xmlrpc.XmlRpcConnectionHandler;
import org.eclipse.emf.emfstore.internal.server.exceptions.InvalidVersionSpecException;
import org.eclipse.emf.emfstore.internal.server.filetransfer.FileChunk;
import org.eclipse.emf.emfstore.internal.server.filetransfer.FileTransferInformation;
import org.eclipse.emf.emfstore.internal.server.model.AuthenticationInformation;
import org.eclipse.emf.emfstore.internal.server.model.ClientVersionInfo;
import org.eclipse.emf.emfstore.internal.server.model.ProjectHistory;
import org.eclipse.emf.emfstore.internal.server.model.ProjectId;
import org.eclipse.emf.emfstore.internal.server.model.ProjectInfo;
import org.eclipse.emf.emfstore.internal.server.model.SessionId;
import org.eclipse.emf.emfstore.internal.server.model.accesscontrol.ACOrgUnitId;
import org.eclipse.emf.emfstore.internal.server.model.accesscontrol.ACUser;
import org.eclipse.emf.emfstore.internal.server.model.accesscontrol.OrgUnitProperty;
import org.eclipse.emf.emfstore.internal.server.model.versioning.BranchInfo;
import org.eclipse.emf.emfstore.internal.server.model.versioning.BranchVersionSpec;
import org.eclipse.emf.emfstore.internal.server.model.versioning.ChangePackage;
import org.eclipse.emf.emfstore.internal.server.model.versioning.HistoryInfo;
import org.eclipse.emf.emfstore.internal.server.model.versioning.HistoryQuery;
import org.eclipse.emf.emfstore.internal.server.model.versioning.LogMessage;
import org.eclipse.emf.emfstore.internal.server.model.versioning.PrimaryVersionSpec;
import org.eclipse.emf.emfstore.internal.server.model.versioning.TagVersionSpec;
import org.eclipse.emf.emfstore.internal.server.model.versioning.VersionSpec;
import org.eclipse.emf.emfstore.server.exceptions.ESException;
/**
* XML RPC based Implementation of ConnectionManager.
*
* @author wesendon
*/
public class XmlRpcConnectionManager extends AbstractConnectionManager<XmlRpcClientManager> implements
ConnectionManager {
private static final String CONNECTION_MANAGER = "org.eclipse.emf.emfstore.client.connectionManager"; //$NON-NLS-1$
/**
* {@inheritDoc}
*/
public AuthenticationInformation logIn(String username, String password, ServerInfo serverInfo,
ClientVersionInfo clientVersionInfo) throws ESException {
final XmlRpcClientManager clientManager = new XmlRpcClientManager(XmlRpcConnectionHandler.EMFSTORE);
clientManager.initConnection(serverInfo);
final AuthenticationInformation authenticationInformation = clientManager.callWithResult("logIn",
AuthenticationInformation.class, username, password, clientVersionInfo);
addConnectionProxy(authenticationInformation.getSessionId(), clientManager);
return authenticationInformation;
}
/**
* {@inheritDoc}
*/
public void logout(SessionId sessionId) throws ESException {
getConnectionProxy(sessionId).call("logout", sessionId);
removeConnectionProxy(sessionId);
}
/**
* {@inheritDoc}
*/
public void addTag(SessionId sessionId, ProjectId projectId, PrimaryVersionSpec versionSpec, TagVersionSpec tag)
throws ESException {
getConnectionProxy(sessionId).call("addTag", sessionId, projectId, versionSpec, tag);
}
/**
* {@inheritDoc}
*/
public ProjectInfo createEmptyProject(SessionId sessionId, String name, String description, LogMessage logMessage)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("createEmptyProject", ProjectInfo.class, sessionId, name,
description, logMessage);
}
/**
* {@inheritDoc}
*/
public ProjectInfo createProject(SessionId sessionId, String name, String description, LogMessage logMessage,
Project project) throws ESException {
return getConnectionProxy(sessionId).callWithResult("createProject", ProjectInfo.class, sessionId, name,
description, logMessage, project);
}
/**
* {@inheritDoc}
*/
public PrimaryVersionSpec createVersion(SessionId sessionId, ProjectId projectId,
PrimaryVersionSpec baseVersionSpec, ChangePackage changePackage, BranchVersionSpec targetBranch,
PrimaryVersionSpec sourceVersion, LogMessage logMessage) throws ESException, InvalidVersionSpecException {
return getConnectionProxy(sessionId).callWithResult("createVersion", PrimaryVersionSpec.class, sessionId,
projectId, baseVersionSpec, changePackage, targetBranch, sourceVersion, logMessage);
}
/**
* {@inheritDoc}
*/
public void deleteProject(SessionId sessionId, ProjectId projectId, boolean deleteFiles) throws ESException {
getConnectionProxy(sessionId).call("deleteProject", sessionId, projectId, deleteFiles);
}
/**
* {@inheritDoc}
*/
public FileChunk downloadFileChunk(SessionId sessionId, ProjectId projectId, FileTransferInformation fileInformation)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("downloadFileChunk", FileChunk.class, sessionId, projectId,
fileInformation);
}
/**
* {@inheritDoc}
*/
public ProjectHistory exportProjectHistoryFromServer(SessionId sessionId, ProjectId projectId)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("exportProjectHistoryFromServer", ProjectHistory.class,
sessionId, projectId);
}
/**
* {@inheritDoc}
*/
public List<ChangePackage> getChanges(SessionId sessionId, ProjectId projectId, VersionSpec source,
VersionSpec target)
throws InvalidVersionSpecException, ESException {
return getConnectionProxy(sessionId).callWithListResult("getChanges", ChangePackage.class, sessionId,
projectId, source, target);
}
/**
*
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.internal.server.EMFStore#getBranches(org.eclipse.emf.emfstore.internal.server.model.SessionId,
* org.eclipse.emf.emfstore.internal.server.model.ProjectId)
*/
public List<BranchInfo> getBranches(SessionId sessionId, ProjectId projectId) throws ESException {
return getConnectionProxy(sessionId).callWithListResult("getBranches", BranchInfo.class, sessionId, projectId);
}
/**
* {@inheritDoc}
*/
public List<HistoryInfo> getHistoryInfo(SessionId sessionId, ProjectId projectId, HistoryQuery<?> historyQuery)
throws ESException {
return getConnectionProxy(sessionId).callWithListResult("getHistoryInfo", HistoryInfo.class, sessionId,
projectId, historyQuery);
}
/**
* {@inheritDoc}
*/
public Project getProject(SessionId sessionId, ProjectId projectId, VersionSpec versionSpec)
throws InvalidVersionSpecException, ESException {
return getConnectionProxy(sessionId).callWithResult("getProject", Project.class, sessionId, projectId,
versionSpec);
}
/**
* {@inheritDoc}
*/
public List<ProjectInfo> getProjectList(SessionId sessionId) throws ESException {
// return getConnectionProxy(sessionId).callWithListResult("getProjectList", ProjectInfo.class, sessionId);
// JaxrsConnectionManager cm;
// cm = new JaxrsConnectionManager();// KeyStoreManager.getInstance().getSSLContext());
// return cm.getProjectList();
final ESExtensionPoint extensionPoint = new ESExtensionPoint(
CONNECTION_MANAGER, true);
final ConnectionManager connectionManager = extensionPoint.getClass("class",
ConnectionManager.class);
// try {
// // Thread.sleep(12000);
// } catch (final InterruptedException ex) {
// // TODO Auto-generated catch block
// }
return connectionManager.getProjectList(sessionId);
}
/**
* {@inheritDoc}
*/
public ProjectId importProjectHistoryToServer(SessionId sessionId, ProjectHistory projectHistory)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("importProjectHistoryToServer", ProjectId.class, sessionId,
projectHistory);
}
/**
* {@inheritDoc}
*/
public void removeTag(SessionId sessionId, ProjectId projectId, PrimaryVersionSpec versionSpec, TagVersionSpec tag)
throws ESException {
getConnectionProxy(sessionId).call("removeTag", sessionId, projectId, versionSpec, tag);
}
/**
* {@inheritDoc}
*/
public ACUser resolveUser(SessionId sessionId, ACOrgUnitId id) throws ESException {
return getConnectionProxy(sessionId).callWithResult("resolveUser", ACUser.class, sessionId, id);
}
/**
* {@inheritDoc}
*/
public PrimaryVersionSpec resolveVersionSpec(SessionId sessionId, ProjectId projectId, VersionSpec versionSpec)
throws InvalidVersionSpecException, ESException {
return getConnectionProxy(sessionId).callWithResult("resolveVersionSpec", PrimaryVersionSpec.class, sessionId,
projectId, versionSpec);
}
/**
* {@inheritDoc}
*/
public void transmitProperty(SessionId sessionId, OrgUnitProperty changedProperty, ACUser tmpUser,
ProjectId projectId) throws ESException {
getConnectionProxy(sessionId).call("transmitProperty", sessionId, changedProperty, tmpUser, projectId);
}
/**
* {@inheritDoc}
*/
public FileTransferInformation uploadFileChunk(SessionId sessionId, ProjectId projectId, FileChunk fileChunk)
throws ESException {
return getConnectionProxy(sessionId).callWithResult("uploadFileChunk", FileTransferInformation.class,
sessionId, projectId, fileChunk);
}
/**
* {@inheritDoc}
*/
public List<EMFStoreProperty> setEMFProperties(SessionId sessionId, List<EMFStoreProperty> properties,
ProjectId projectId) throws ESException {
return getConnectionProxy(sessionId).callWithListResult("setEMFProperties", EMFStoreProperty.class, sessionId,
properties, projectId);
}
/**
* {@inheritDoc}
*/
public List<EMFStoreProperty> getEMFProperties(SessionId sessionId, ProjectId projectId) throws ESException {
return getConnectionProxy(sessionId).callWithListResult("getEMFProperties", EMFStoreProperty.class, sessionId,
projectId);
}
/**
*
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.internal.client.model.connectionmanager.ConnectionManager#isLoggedIn(org.eclipse.emf.emfstore.internal.server.model.SessionId)
*/
public boolean isLoggedIn(SessionId id) {
return hasConnectionProxy(id);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.emf.emfstore.internal.server.EMFStore#registerEPackage(org.eclipse.emf.emfstore.internal.server.model.SessionId,
* org.eclipse.emf.ecore.EPackage)
*/
public void registerEPackage(SessionId sessionId, EPackage pkg) throws ESException {
getConnectionProxy(sessionId).call("registerEPackage", sessionId, pkg);
}
}
| removed trash comments
| bundles/org.eclipse.emf.emfstore.client/src/org/eclipse/emf/emfstore/internal/client/model/connectionmanager/xmlrpc/XmlRpcConnectionManager.java | removed trash comments | <ide><path>undles/org.eclipse.emf.emfstore.client/src/org/eclipse/emf/emfstore/internal/client/model/connectionmanager/xmlrpc/XmlRpcConnectionManager.java
<ide> public List<ProjectInfo> getProjectList(SessionId sessionId) throws ESException {
<ide> // return getConnectionProxy(sessionId).callWithListResult("getProjectList", ProjectInfo.class, sessionId);
<ide>
<del> // JaxrsConnectionManager cm;
<del> // cm = new JaxrsConnectionManager();// KeyStoreManager.getInstance().getSSLContext());
<del> // return cm.getProjectList();
<del>
<ide> final ESExtensionPoint extensionPoint = new ESExtensionPoint(
<ide> CONNECTION_MANAGER, true);
<ide>
<ide> final ConnectionManager connectionManager = extensionPoint.getClass("class",
<ide> ConnectionManager.class);
<del>
<del> // try {
<del> // // Thread.sleep(12000);
<del> // } catch (final InterruptedException ex) {
<del> // // TODO Auto-generated catch block
<del> // }
<ide>
<ide> return connectionManager.getProjectList(sessionId);
<ide> } |
|
Java | apache-2.0 | fd4e741f387bd7b28473b55b35ca4ac945834b93 | 0 | dreedyman/Rio,dreedyman/Rio,dreedyman/Rio | /*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.test.bean;
import net.jini.core.lookup.ServiceItem;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.rioproject.associations.AssociationDescriptor;
import org.rioproject.associations.AssociationType;
import org.rioproject.cybernode.Cybernode;
import org.rioproject.deploy.ServiceBeanInstance;
import org.rioproject.event.BasicEventConsumer;
import org.rioproject.event.RemoteServiceEvent;
import org.rioproject.event.RemoteServiceEventListener;
import org.rioproject.monitor.ProvisionFailureEvent;
import org.rioproject.monitor.ProvisionMonitor;
import org.rioproject.monitor.ProvisionMonitorEvent;
import org.rioproject.opstring.*;
import org.rioproject.test.RioTestRunner;
import org.rioproject.test.SetTestManager;
import org.rioproject.test.TestManager;
/**
* Test pre and post advertisement invocations
*
* @author Dennis Reedy
*/
@RunWith(RioTestRunner.class)
public class AdvertiseLifecycleTest {
@SetTestManager
static TestManager testManager;
ServiceItem[] monitorItems;
ProvisionMonitor monitor;
Cybernode cybernode;
@Before
public void getServices() {
monitorItems = testManager.getServiceItems(ProvisionMonitor.class);
Assert.assertEquals(1, monitorItems.length);
monitor = (ProvisionMonitor)monitorItems[0].service;
cybernode = (Cybernode)testManager.waitForService(Cybernode.class);
}
@Test
public void testThatServiceThatThrowsDuringPreAdvertiseDoesNotGetDeployed() throws Exception {
String opStringName = "Foo";
ServiceElement element = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"Test",
opStringName,
Boolean.TRUE.toString(),
Boolean.FALSE.toString(),
1);
PFEListener listener = new PFEListener();
BasicEventConsumer eventConsumer = new BasicEventConsumer(ProvisionFailureEvent.getEventDescriptor(), listener);
eventConsumer.register(monitorItems[0]);
OpString opString = new OpString(opStringName, null);
opString.addService(element);
Assert.assertNotNull(monitor);
testManager.deploy(opString, monitor);
for(int i=0; i<10; i++) {
if(listener.failed!=null) {
break;
}
Thread.sleep(500);
}
Assert.assertNotNull(listener.failed);
ServiceBeanInstance[] instances = cybernode.getServiceBeanInstances(element);
Assert.assertEquals(0, instances.length);
eventConsumer.terminate();
}
@Test
public void testThatServiceThatThrowsDuringPostAdvertiseWithRequiresAssociation() throws Exception {
String opStringName = "Bar";
ServiceElement element1 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"Test",
opStringName,
Boolean.FALSE.toString(),
Boolean.TRUE.toString(),
1);
ServiceElement element2 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"DependsOn",
opStringName,
Boolean.FALSE.toString(),
Boolean.FALSE.toString(),
1);
AssociationDescriptor descriptor = new AssociationDescriptor(AssociationType.REQUIRES, "DependsOn");
descriptor.setMatchOnName(true);
descriptor.setOperationalStringName(element2.getOperationalStringName());
descriptor.setGroups(testManager.getGroups());
element1.addAssociationDescriptors(descriptor);
OpString opString = new OpString(opStringName, null);
opString.addService(element1);
opString.addService(element2);
OperationalStringManager manager = testManager.deploy(opString, monitor);
Assert.assertNotNull(manager);
testManager.waitForDeployment(manager);
PMEListener listener = new PMEListener(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED);
BasicEventConsumer eventConsumer = new BasicEventConsumer(ProvisionMonitorEvent.getEventDescriptor(), listener);
eventConsumer.register(monitorItems[0]);
manager.removeServiceElement(element2, true);
for(int i=0; i<10; i++) {
if(listener.event!=null &&
listener.event.getAction().equals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED)) {
break;
}
Thread.sleep(500);
}
eventConsumer.terminate();
Assert.assertNotNull(listener.event);
Assert.assertEquals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED, listener.event.getAction());
Assert.assertEquals(element1, listener.event.getServiceElement());
OperationalString operationalString = manager.getOperationalString();
Assert.assertEquals(1, operationalString.getServices().length);
Assert.assertEquals("Test", operationalString.getServices()[0].getName());
Assert.assertEquals(0, operationalString.getServices()[0].getPlanned());
}
@Test
public void testThatServiceThrowsDuringPreAdvertiseWithRequiresAssociation() throws Exception {
ServiceElement element1 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"Test",
"FooBar",
Boolean.TRUE.toString(),
Boolean.FALSE.toString(),
1);
ServiceElement element2 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"DependsOn",
"FooBar",
Boolean.FALSE.toString(),
Boolean.FALSE.toString(),
0);
AssociationDescriptor descriptor = new AssociationDescriptor(AssociationType.REQUIRES, "DependsOn");
descriptor.setMatchOnName(true);
descriptor.setOperationalStringName(element2.getOperationalStringName());
descriptor.setGroups(testManager.getGroups());
element1.setAssociationDescriptors(descriptor);
OpString opString = new OpString("FooBar", null);
opString.addService(element1);
opString.addService(element2);
OperationalStringManager manager = testManager.deploy(opString, monitor);
Assert.assertNotNull(manager);
ServiceBeanInstance[] instances = new ServiceBeanInstance[0];
for(int i=0; i<10; i++) {
instances = cybernode.getServiceBeanInstances(element1);
if(instances.length>0){
break;
}
Thread.sleep(500);
}
Assert.assertEquals(1, instances.length);
manager.increment(element2, true, null);
PMEListener listener = new PMEListener(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED);
BasicEventConsumer eventConsumer = new BasicEventConsumer(ProvisionMonitorEvent.getEventDescriptor(), listener);
eventConsumer.register(monitorItems[0]);
testManager.waitForService("DependsOn");
for(int i=0; i<10; i++) {
if(listener.event!=null &&
listener.event.getAction().equals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED)) {
break;
}
Thread.sleep(500);
}
eventConsumer.terminate();
Assert.assertNotNull(listener.event);
Assert.assertEquals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED, listener.event.getAction());
instances = cybernode.getServiceBeanInstances(element1);
Assert.assertEquals(0, instances.length);
instances = manager.getServiceBeanInstances(element1);
Assert.assertEquals(0, instances.length);
OperationalString operationalString = manager.getOperationalString();
ServiceElement element1AfterDecrement = null;
for(ServiceElement service : operationalString.getServices()) {
if(service.getName().equals("Test")) {
element1AfterDecrement = service;
break;
}
}
Assert.assertNotNull(element1AfterDecrement);
Assert.assertEquals(0, element1AfterDecrement.getPlanned());
}
private ServiceElement makeServiceElement(String implClass,
String name,
String opstringName,
String throwOnPreAdvertise,
String throwOnPostUnAdvertise,
int planned) {
ServiceElement elem = new ServiceElement();
ClassBundle main = new ClassBundle(implClass,
new String[]{System.getProperty("user.dir")+"/target/test-classes/"},
"file://");
elem.setComponentBundle(main);
ClassBundle export = new ClassBundle(org.rioproject.resources.servicecore.Service.class.getName(),
new String[]{System.getProperty("user.dir")+"/target/test-classes/"},
"file://");
elem.setExportBundles(export);
ServiceBeanConfig sbc = new ServiceBeanConfig();
sbc.setName(name);
sbc.setGroups(System.getProperty("org.rioproject.groups"));
sbc.addInitParameter("throwOnPreAdvertise", Boolean.valueOf(throwOnPreAdvertise));
sbc.addInitParameter("throwOnPostUnAdvertise", Boolean.valueOf(throwOnPostUnAdvertise));
elem.setServiceBeanConfig(sbc);
elem.setOperationalStringName(opstringName);
elem.setPlanned(planned);
elem.setFaultDetectionHandlerBundle(null);
return elem;
}
class PFEListener implements RemoteServiceEventListener {
ProvisionFailureEvent failed;
public void notify(RemoteServiceEvent event) {
failed = (ProvisionFailureEvent)event;
}
}
class PMEListener implements RemoteServiceEventListener {
ProvisionMonitorEvent event;
ProvisionMonitorEvent.Action actionToMatch;
PMEListener(ProvisionMonitorEvent.Action actionToMatch) {
this.actionToMatch = actionToMatch;
}
public void notify(RemoteServiceEvent rEvent) {
if(((ProvisionMonitorEvent)rEvent).getAction().equals(actionToMatch))
event = (ProvisionMonitorEvent)rEvent;
}
}
}
| rio-test/src/test/java/org/rioproject/test/bean/AdvertiseLifecycleTest.java | /*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.test.bean;
import net.jini.core.lookup.ServiceItem;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.rioproject.associations.AssociationDescriptor;
import org.rioproject.associations.AssociationType;
import org.rioproject.cybernode.Cybernode;
import org.rioproject.deploy.ServiceBeanInstance;
import org.rioproject.event.BasicEventConsumer;
import org.rioproject.event.RemoteServiceEvent;
import org.rioproject.event.RemoteServiceEventListener;
import org.rioproject.monitor.ProvisionFailureEvent;
import org.rioproject.monitor.ProvisionMonitor;
import org.rioproject.monitor.ProvisionMonitorEvent;
import org.rioproject.opstring.*;
import org.rioproject.test.RioTestRunner;
import org.rioproject.test.SetTestManager;
import org.rioproject.test.TestManager;
/**
* Test pre and post advertisement invocations
*
* @author Dennis Reedy
*/
@RunWith(RioTestRunner.class)
public class AdvertiseLifecycleTest {
@SetTestManager
static TestManager testManager;
ServiceItem[] monitorItems;
ProvisionMonitor monitor;
Cybernode cybernode;
@Before
public void getServices() {
monitorItems = testManager.getServiceItems(ProvisionMonitor.class);
Assert.assertEquals(1, monitorItems.length);
monitor = (ProvisionMonitor)monitorItems[0].service;
cybernode = (Cybernode)testManager.waitForService(Cybernode.class);
}
@Test
public void testThatServiceThatThrowsDuringPreAdvertiseDoesNotGetDeployed() throws Exception {
String opStringName = "Foo";
ServiceElement element = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"Test",
opStringName,
Boolean.TRUE.toString(),
Boolean.FALSE.toString(),
1);
PFEListener listener = new PFEListener();
BasicEventConsumer eventConsumer = new BasicEventConsumer(ProvisionFailureEvent.getEventDescriptor(), listener);
eventConsumer.register(monitorItems[0]);
OpString opString = new OpString(opStringName, null);
opString.addService(element);
Assert.assertNotNull(monitor);
testManager.deploy(opString, monitor);
for(int i=0; i<10; i++) {
if(listener.failed!=null) {
break;
}
Thread.sleep(500);
}
Assert.assertNotNull(listener.failed);
ServiceBeanInstance[] instances = cybernode.getServiceBeanInstances(element);
Assert.assertEquals(0, instances.length);
eventConsumer.terminate();
}
@Test
public void testThatServiceThatThrowsDuringPostAdvertiseWithRequiresAssociation() throws Exception {
String opStringName = "Bar";
ServiceElement element1 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"Test",
opStringName,
Boolean.FALSE.toString(),
Boolean.TRUE.toString(),
1);
ServiceElement element2 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"DependsOn",
opStringName,
Boolean.FALSE.toString(),
Boolean.FALSE.toString(),
1);
AssociationDescriptor descriptor = new AssociationDescriptor(AssociationType.REQUIRES, "DependsOn");
descriptor.setMatchOnName(true);
descriptor.setOperationalStringName(element2.getOperationalStringName());
descriptor.setGroups(testManager.getGroups());
element1.setAssociationDescriptors(descriptor);
OpString opString = new OpString(opStringName, null);
opString.addService(element1);
opString.addService(element2);
OperationalStringManager manager = testManager.deploy(opString, monitor);
Assert.assertNotNull(manager);
testManager.waitForDeployment(manager);
PMEListener listener = new PMEListener(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED);
BasicEventConsumer eventConsumer = new BasicEventConsumer(ProvisionMonitorEvent.getEventDescriptor(), listener);
eventConsumer.register(monitorItems[0]);
manager.removeServiceElement(element2, true);
for(int i=0; i<10; i++) {
if(listener.event!=null &&
listener.event.getAction().equals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED)) {
break;
}
Thread.sleep(500);
}
eventConsumer.terminate();
Assert.assertNotNull(listener.event);
Assert.assertEquals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED, listener.event.getAction());
Assert.assertEquals(element1, listener.event.getServiceElement());
OperationalString operationalString = manager.getOperationalString();
Assert.assertEquals(1, operationalString.getServices().length);
Assert.assertEquals("Test", operationalString.getServices()[0].getName());
Assert.assertEquals(0, operationalString.getServices()[0].getPlanned());
}
@Test
public void testThatServiceThrowsDuringPreAdvertiseWithRequiresAssociation() throws Exception {
ServiceElement element1 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"Test",
"FooBar",
Boolean.TRUE.toString(),
Boolean.FALSE.toString(),
1);
ServiceElement element2 = makeServiceElement(ServiceThatThrowsDuringAdvertiseCallbacks.class.getName(),
"DependsOn",
"FooBar",
Boolean.FALSE.toString(),
Boolean.FALSE.toString(),
0);
AssociationDescriptor descriptor = new AssociationDescriptor(AssociationType.REQUIRES, "DependsOn");
descriptor.setMatchOnName(true);
descriptor.setOperationalStringName(element2.getOperationalStringName());
descriptor.setGroups(testManager.getGroups());
element1.setAssociationDescriptors(descriptor);
OpString opString = new OpString("FooBar", null);
opString.addService(element1);
opString.addService(element2);
OperationalStringManager manager = testManager.deploy(opString, monitor);
Assert.assertNotNull(manager);
ServiceBeanInstance[] instances = new ServiceBeanInstance[0];
for(int i=0; i<10; i++) {
instances = cybernode.getServiceBeanInstances(element1);
if(instances.length>0){
break;
}
Thread.sleep(500);
}
Assert.assertEquals(1, instances.length);
manager.increment(element2, true, null);
PMEListener listener = new PMEListener(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED);
BasicEventConsumer eventConsumer = new BasicEventConsumer(ProvisionMonitorEvent.getEventDescriptor(), listener);
eventConsumer.register(monitorItems[0]);
testManager.waitForService("DependsOn");
for(int i=0; i<10; i++) {
if(listener.event!=null &&
listener.event.getAction().equals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED)) {
break;
}
Thread.sleep(500);
}
eventConsumer.terminate();
Assert.assertNotNull(listener.event);
Assert.assertEquals(ProvisionMonitorEvent.Action.SERVICE_BEAN_DECREMENTED, listener.event.getAction());
instances = cybernode.getServiceBeanInstances(element1);
Assert.assertEquals(0, instances.length);
instances = manager.getServiceBeanInstances(element1);
Assert.assertEquals(0, instances.length);
OperationalString operationalString = manager.getOperationalString();
ServiceElement element1AfterDecrement = null;
for(ServiceElement service : operationalString.getServices()) {
if(service.getName().equals("Test")) {
element1AfterDecrement = service;
break;
}
}
Assert.assertNotNull(element1AfterDecrement);
Assert.assertEquals(0, element1AfterDecrement.getPlanned());
}
private ServiceElement makeServiceElement(String implClass,
String name,
String opstringName,
String throwOnPreAdvertise,
String throwOnPostUnAdvertise,
int planned) {
ServiceElement elem = new ServiceElement();
ClassBundle main = new ClassBundle(implClass,
new String[]{System.getProperty("user.dir")+"/target/test-classes/"},
"file://");
elem.setComponentBundle(main);
ClassBundle export = new ClassBundle(org.rioproject.resources.servicecore.Service.class.getName(),
new String[]{System.getProperty("user.dir")+"/target/test-classes/"},
"file://");
elem.setExportBundles(export);
ServiceBeanConfig sbc = new ServiceBeanConfig();
sbc.setName(name);
sbc.setGroups(System.getProperty("org.rioproject.groups"));
sbc.addInitParameter("throwOnPreAdvertise", Boolean.valueOf(throwOnPreAdvertise));
sbc.addInitParameter("throwOnPostUnAdvertise", Boolean.valueOf(throwOnPostUnAdvertise));
elem.setServiceBeanConfig(sbc);
elem.setOperationalStringName(opstringName);
elem.setPlanned(planned);
elem.setFaultDetectionHandlerBundle(null);
return elem;
}
class PFEListener implements RemoteServiceEventListener {
ProvisionFailureEvent failed;
public void notify(RemoteServiceEvent event) {
failed = (ProvisionFailureEvent)event;
}
}
class PMEListener implements RemoteServiceEventListener {
ProvisionMonitorEvent event;
ProvisionMonitorEvent.Action actionToMatch;
PMEListener(ProvisionMonitorEvent.Action actionToMatch) {
this.actionToMatch = actionToMatch;
}
public void notify(RemoteServiceEvent rEvent) {
if(((ProvisionMonitorEvent)rEvent).getAction().equals(actionToMatch))
event = (ProvisionMonitorEvent)rEvent;
}
}
}
| Call ServiceElement.addAssociationDescriptors() instead of deprecated ServiceElement.setAssociationDescriptors()
| rio-test/src/test/java/org/rioproject/test/bean/AdvertiseLifecycleTest.java | Call ServiceElement.addAssociationDescriptors() instead of deprecated ServiceElement.setAssociationDescriptors() | <ide><path>io-test/src/test/java/org/rioproject/test/bean/AdvertiseLifecycleTest.java
<ide> descriptor.setMatchOnName(true);
<ide> descriptor.setOperationalStringName(element2.getOperationalStringName());
<ide> descriptor.setGroups(testManager.getGroups());
<del> element1.setAssociationDescriptors(descriptor);
<add> element1.addAssociationDescriptors(descriptor);
<ide> OpString opString = new OpString(opStringName, null);
<ide> opString.addService(element1);
<ide> opString.addService(element2); |
|
JavaScript | mit | d2cf17b650572738ad7eed31ac125035c777b462 | 0 | idiap/inevent,idiap/inevent | // CLASS WIDGET MANAGER
function Graph() {
this.initVars = function() {
this.small_rect = [75.0, 56.0] ;
this.big_rect = [300.0, 250.0] ;
this.image_rect = [this.big_rect[0] - 40, this.big_rect[1] - 70] ;
this.margin = {top: 0, right: 0, bottom: 0, left: 0} ;
this.graph_width = this.width - this.margin.left - this.margin.right ;
this.graph_height = this.height - this.margin.top - this.margin.bottom ;
this.graph_top = this.top;
this.graph_left = this.left;
this.queue = [] ;
this.excluded = [] ; //SD/ to store node who already displays neighbours
this.input_links = [] ;
this.svg = d3.select("#" + this.graph_id).append("svg")
.attr("width", this.graph_width)
.attr("height", this.graph_height);
this.color = { black:"black",
grey:"grey"} ;
}
this.loadGraph = function(data, graph_id, max_size, max_depth, max_neighbours, width, height, top, left, video_switch) {
//SD/ Set parameters in local vars
this.input_nodes = data ;
this.graph_id = graph_id ;
this.height = height ;
this.width = width ;
this.top = top ;
this.left = left ;
this.max_size = max_size ;
this.max_depth = max_depth ;
this.max_neighbours = max_neighbours ;
//SD/ Set optional parameters with default values in local vars
this.video_switch = typeof video_switch !== 'undefined' ? video_switch : false;
this.initVars();
this.force = d3.layout.force()
.linkDistance(150)
.charge(-50)
.gravity(0.01)
.nodes(this.input_nodes)
.links(this.input_links)
.size([this.graph_width, this.graph_height])
.on("tick", this.boundedTick.bind(this))
.start();
this.displayNodes();
this.displayLinks();
}
this.updateGraph = function(new_data) {
_this = this ;
//SD/ Merge graphed nodes with new nodes removing duplicates
if(typeof new_data['nodes'] !== undefined && new_data['nodes'] != null) {
if(new_data['nodes'].length > 0) {
//SD/ Unique isn't working here
//this.input_nodes = $.unique($.merge(this.input_nodes, new_data['nodes'])) ;
for(var i = 0 ; i < new_data['nodes'].length ; i++)
this.addNodeIfUnique(new_data['nodes'][i]) ;
}
}
//SD/ Prepare link and push them to input_links
links = new_data['links'] ;
if(typeof new_data['links'] !== undefined && new_data['links'] != null) {
for (var s = 0; s < links.length; s++) {
target = this.find_node_index(links[s]['target']);
source = this.find_node_index(links[s]['source']);
if (target!=-1 && source!=-1) {
this.input_links.push({
"target":target,
"source":source,
"depth":links[s]['depth'],
"weight":links[s]['weight']
})
}
else{
if (target == -1){
console.log("Missing target node: "+links[s]['target'])
}
if (source == -1){
console.log("Missing source node : "+links[s]['source'])
}
}
}
}
this.displayNodes();
this.displayLinks();
this.force.start();
}
this.setCenter = function(d) {
if(d.id == this.input_nodes[0]['id'] && this.video_switch == true) {
get_graph() ;
}
else
document.location.href = document.location.href.split("inevent_portal")[0] + "inevent_portal/hyperevent/" + d.id;
}
this.find_node_index = function(node_id) {
for(var i=0; i < this.input_nodes.length; i++){
if(parseInt(this.input_nodes[i].id) == parseInt(node_id)) {
return i;
}
}
return -1;
}
this.addNodeIfUnique = function(candidate_node) {
for(var i=0; i < this.input_nodes.length; i++){
if(parseInt(this.input_nodes[i].id) == parseInt(candidate_node.id)) {
return ;
}
}
this.input_nodes.push(candidate_node) ;
}
this.mouseover = function(d, display_class, title_id){
zoom_in(d, "node" + d.id, "rect" + d.id, this.graph_left, this.graph_width, this.graph_top, this.graph_height, display_class);
display_title(d.title, "video_title" + d.id);
}
this.displayNodes = function() {
var _this = this ;
this.node = this.svg.selectAll(".node").data(this.input_nodes);
this.nodeEnter = this.node.enter().append("svg:g");
this.nodeEnter.attr("id", function(d) { return "node" + d.id;})
.attr("class", "node")
.on("click", function(d) {d3.event.stopPropagation(); _this.mouseover(d,"word_cloud_" + d.id,"video_title" + d.id); })
.call(this.force.drag); //SD/ Enable Drag&Drop
this.defs = this.nodeEnter.append("defs") ;
this.defs.append("rect")
.attr("id", function(d) { return "rect_node" + d.id})
.style("stroke-width", 4)
.style("fill", "none") // Make the nodes hollow looking
.attr("height", this.small_rect[1] - 10)
.attr("width", this.small_rect[0])
.style("stroke", function(d) {
//SD/ Color in black first node only
if(d.depth < 1) { return _this.color.black } else { return _this.color.grey }
})
.attr("rx", "5")
.attr('x', -this.small_rect[0] / 2)
.attr('y', -this.small_rect[1] / 2 + 5);
this.defs.append("svg:clipPath")
.attr("id", function(d) { return "clip" + d.id})
.append("use")
.attr("xlink:href", function(d) { return "#rect_node" + d.id});
this.nodeEnter.append("use")
.attr("xlink:href", function(d) { return "#rect_node" + d.id});
this.nodeEnter.append("image")
.attr("id", function(d) { return "image" + d.id})
.attr("xlink:href", function(d) { return d.snapshot_url })
.attr("height", _this.small_rect[1])
.attr("width", _this.small_rect[0])
.attr("class", "graph_images")
.attr('x', -this.small_rect[0] / 2)
.attr('y', -this.small_rect[1] / 2)
.attr("clip-path", function(d) { return "url(#"+"clip" + d.id +")"}) ;
//SD/ Define window
var rect = this.nodeEnter.append("rect")
.attr("id", function(d) { return "rect"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", this.big_rect[1])
.attr("width", this.big_rect[0])
.style("stroke", function(d) {
//SD/ Color in black first node only
if(d.depth < 1) { return _this.color.black } else { return _this.color.grey }
})
.style("fill","white") // Make the nodes hollow looking
.style("stroke-width", 2) // Give the node strokes some thickness
.attr('x', -this.big_rect[0]/2)
.attr('y', -this.big_rect[1]/2)
.attr('rx', 5)
.attr('ry', 5);
this.nodeEnter.append("image")
.attr("id", function(d) { return "cloud"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("xlink:href", function(d) { return d.snapshot_url })
.attr('x', -this.image_rect[0]/2)
.attr('y', -this.image_rect[1]/2)
.attr("height", this.image_rect[1])
.attr("width", this.image_rect[0]);
//.attr("clip-path", function(d) { return "url(#"+"clip_zoom"+d.id +")"});
/* Adapt node display depending on Web Browser */
var browser = window.navigator.userAgent.toLowerCase();
var version = window.navigator.appVersion;
_this = this ;
if ( (browser.indexOf("firefox")>-1) || ( (browser.indexOf("safari")>-1) && version>6)) {
//Explorers who does not support the use of foreign objects in SVG
this.nodeEnter.append('svg:foreignObject')
.attr("id", function(d) { return "centralize"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 30)
.attr("width", 80)
.attr("color","#707070")
//.attr("requiredExtensions","http://www.w3.org/1999/xhtml")
.attr('x', -this.image_rect[0]/2)
.attr('y', 90)
.on("click", function(d) { d3.event.stopPropagation(); _this.setCenter(d);})
.append("xhtml:body")
.attr("xmlns","http://www.w3.org/1999/xhtml")
.html('<button class="btn btn-small" value="btn" type="button"> <i class="icon-play"></i> Play</button> ');
this.nodeEnter.append('svg:foreignObject')
.attr("id", function(d) { return "close"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 30)
.attr("width", 80)
//.attr("requiredExtensions","http://www.w3.org/1999/xhtml")
.attr('x', -this.image_rect[0]/2 + 180)
.attr('y', 90)
.on("click", function(d) {d3.event.stopPropagation(); mouseout(d,"word_cloud_"+d.id); return false})
.append("xhtml:body")
.attr("xmlns","http://www.w3.org/1999/xhtml")
.html('<button class="btn btn-small" value="btn" type="button" ><b>[x]</b> Close</button>');
}
else {
this.nodeEnter.append('image')
.attr("id", function(d) { return "centralize"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 33)
.attr("width",69)
.attr('x', -this.image_rect[0]/2)
.attr('y', 90)
.on("click", function(d) { d3.event.stopPropagation(); _this.setCenter(d);})
.attr("xlink:href", function(d) { return "/static/inevent/images/play_button.png"});
this.nodeEnter.append('image')
.attr("id", function(d) { return "close"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 33)
.attr("width", 79)
.attr('x', -this.image_rect[0]/2 + 180)
.attr('y', 90)
.on("click", function(d) {d3.event.stopPropagation(); mouseout(d,"word_cloud_"+d.id); return false})
.attr("xlink:href", function(d) { return "/static/inevent/images/close_button.png"});
}
//SD/ Define title
node_text = this.nodeEnter.append('text')
.attr("id", function(d) { return "video_title"+d.id})
.attr("height", 60)
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("fill","#707070")
.attr("width", this.image_rect[0])
.attr('x', function(d) { return -_this.image_rect[0]/2+10})
.attr('y', function(d) { return -_this.big_rect[1]/2 + 10})
.text("")
//SD/TDOD : What is the purpose of this code ?
this.nodeEnter.append("title")
.text(function(d) { return d.title; });
this.node.exit().remove();
}
this.displayLinks = function() {
_this = this ;
this.link = this.svg.selectAll(".link").data(this.input_links);
this.linkEnter=this.link.enter().append("line")
.attr("class", "link")
.style("stroke-width",function(d) {return d.weight})
.style("stroke", function(d) {
//SD/ Color in black links of first node only
if(d.depth < 2) { return _this.color.black } else { return _this.color.grey }
})
.attr("x1", function(d) { return d.source.x})
.attr("y1", function(d) { return d.source.y})
.attr("x2", function(d) { return d.target.x})
.attr("y2", function(d) { return d.target.y});
this.link.exit().remove();
//SD/ Push lines to background
this.svg.selectAll(".link").moveToBack();
}
this.boundedTick = function() {
node = this.svg.selectAll(".node");
link = this.svg.selectAll(".link");
var g = this;
node.attr("cx", function(d) { return d.x; })
node.attr("cy", function(d) { return d.y; })
// node.attr("cx", function(d) { return d.x = Math.max(g.small_rect[0], Math.min(g.graph_width - g.small_rect[0], d.x)); })
// node.attr("cy", function(d) { return d.y = Math.max(g.small_rect[1] - 10, Math.min(g.graph_height - g.small_rect[1] + 10, d.y)); })
link.attr("x1", function(d) { return d.source.x; })
.attr("y1", function(d) { return d.source.y; })
.attr("x2", function(d) { return d.target.x; })
.attr("y2", function(d) { return d.target.y; });
node.attr("transform", function(d) {
return "translate(" + d.x + "," + d.y + ")";
});
}
this.getExcludedEvent = function() {
return this.excluded ;
}
this.addExclusion = function(event_id) {
if(typeof event_id != undefined)
this.excluded.push(event_id) ;
}
this.setWidth = function(new_width) {
this.width = new_width ;
if (this.margin!=undefined) {
this.graph_width = this.width - this.margin.left - this.margin.right ;
this.svg.attr("width", this.graph_width) ;
this.force.start();
}
}
//SD/ FROM HERE queue management where we could use a jQuery Queue Object (http://api.jquery.com/jquery.queue/)
//SD/ Enqueue if not excluded
this.enQueue = function(nodes) {
if(typeof nodes !== undefined && nodes != null) {
for(var i = 0 ; i < nodes.length ; i++)
this.checkAndQueue(nodes[i]) ;
}
}
this.checkAndQueue = function(node) {
for(var i = 0 ; i < this.excluded.length ; i++)
if(node['id'] == this.excluded[i])
return ;
this.queue.push(node) ;
}
this.deQueue = function() {
this.queue.splice(0, 1) ;
}
this.firstQueue = function() {
return [this.queue[0]] ;
}
this.firstQueueID = function() {
if (this.queue.length > 0)
return this.queue[0]['id'] ;
}
this.sizeQueue = function() {
return this.queue.length ;
}
//SD/ END FROM HERE queue management
}
graph = new Graph();
/*SD/ ==========================================================================
Function called to display graph and get data
==============================================================================*/
function display_graph_error(error) {
graph_data_fetched = false;
$('#graph_button').html("Show as List")
$('#graph').html('<div class="alert alert-error" style ="margin-top:100px;position:relative;margin-bottom:100px"> Unable to load graph. Please try again later.');
$('#graph_button').prop('disabled', false);
}
//SD/ Initiate the first graph and call dynamically next data
function display_graph_head(data, video_switch, max_neighbours, max_depth, max_size) {
graph_data_fetched = true;
video_switch = typeof video_switch !== 'undefined' ? video_switch : false;
max_neighbours = typeof max_neighbours !== 'undefined' ? max_neighbours : 5;
max_depth = typeof max_depth !== 'undefined' ? max_depth : 2;
max_size = typeof max_size !== 'undefined' ? max_size : 100;
//SD/ Create first graph without any data
$('#graph').html("");
position = $('#graph').position();
graph.loadGraph(data, "graph", max_size, max_depth, max_neighbours, $("#graph_container").width(), 700, position['top'], position['left'], video_switch);
//SD/ Prepare queue for nodes
data[0]['depth'] = 0 ;
graph.enQueue(data) ;
//SD/ Get first neighbours
var first = graph.firstQueue() ;
params = {'event_id': graph.firstQueueID(), 'count': 1, 'depth': 1, 'num_of_similar': graph.max_neighbours, 'error_callback': display_graph_error} ;
Dajaxice.inevent.get_graph_neighbours(
function(data){display_graph(data, display_graph);}, params) ;
//$('#graph_button').prop('disabled', false);
}
//SD/ update graph with children data
function display_graph(data, callback) {
if(data['depth'] <= graph.max_depth) //SD/ Check for depth
{
var rest = graph.max_size - graph.input_nodes.length ;
var cuted = false ;
graph.addExclusion(data['caller_id']) ;
if(typeof data['nodes'] !== undefined && data['nodes'] != null) {
//SD/ Remove some neighbours if nodes limit reached
if(data['nodes'].length > rest) {
data['nodes'].splice(rest , data['nodes'].length - rest) ;
cuted = true ;
}
//SD/ Enqueue neighbours
graph.enQueue(data['nodes']) ;
}
//SD/ Graph node and prepare its links for next neighbours
//$('#graph').html("");
graph.updateGraph({'nodes': data['nodes'], 'caller_id': data['caller_id'], 'links': data['links']}) ;
//SD/ Dequeue first node
graph.deQueue() ;
//SD/ Check exclusion for next node
if(graph.sizeQueue() > 0 && !cuted) {
var first = graph.firstQueue() ;
if(callback) {
params = {'event_id': first[0]['id'], 'count': data['count'] + 1, 'depth': first[0]['depth'] + 1, 'num_of_similar': graph.max_neighbours, 'error_callback': display_graph_error} ;
callback(
Dajaxice.inevent.get_graph_neighbours(function(data){display_graph(data, display_graph); }, params)
) ;
}
}
else
console.log("End of queue after exclusion with " + graph.input_nodes.length + " nodes") ;
}
}
//SD/ If windows is resized
$( window ).resize(function() {
//SD/ adapt graph size with container width
graph.setWidth($("#graph_container").width()) ;
});
| static/inevent/scripts/graph.js | // CLASS WIDGET MANAGER
function Graph() {
this.initVars = function() {
this.small_rect = [75.0, 56.0] ;
this.big_rect = [300.0, 250.0] ;
this.image_rect = [this.big_rect[0] - 40, this.big_rect[1] - 70] ;
this.margin = {top: 0, right: 0, bottom: 0, left: 0} ;
this.graph_width = this.width - this.margin.left - this.margin.right ;
this.graph_height = this.height - this.margin.top - this.margin.bottom ;
this.graph_top = this.top;
this.graph_left = this.left;
this.queue = [] ;
this.excluded = [] ; //SD/ to store node who already displays neighbours
this.input_links = [] ;
this.svg = d3.select("#" + this.graph_id).append("svg")
.attr("width", this.graph_width)
.attr("height", this.graph_height);
this.color = { black:"black",
grey:"grey"} ;
}
this.loadGraph = function(data, graph_id, max_size, max_depth, max_neighbours, width, height, top, left, video_switch) {
//SD/ Set parameters in local vars
this.input_nodes = data ;
this.graph_id = graph_id ;
this.height = height ;
this.width = width ;
this.top = top ;
this.left = left ;
this.max_size = max_size ;
this.max_depth = max_depth ;
this.max_neighbours = max_neighbours ;
//SD/ Set optional parameters with default values in local vars
this.video_switch = typeof video_switch !== 'undefined' ? video_switch : false;
this.initVars();
this.force = d3.layout.force()
.linkDistance(150)
.charge(-50)
.gravity(0.01)
.nodes(this.input_nodes)
.links(this.input_links)
.size([this.graph_width, this.graph_height])
.on("tick", this.boundedTick.bind(this))
.start();
this.displayNodes();
this.displayLinks();
}
this.updateGraph = function(new_data) {
_this = this ;
//SD/ Merge graphed nodes with new nodes removing duplicates
if(typeof new_data['nodes'] !== undefined && new_data['nodes'] != null) {
if(new_data['nodes'].length > 0) {
//SD/ Unique isn't working here
//this.input_nodes = $.unique($.merge(this.input_nodes, new_data['nodes'])) ;
for(var i = 0 ; i < new_data['nodes'].length ; i++)
this.addNodeIfUnique(new_data['nodes'][i]) ;
}
}
//SD/ Prepare link and push them to input_links
links = new_data['links'] ;
if(typeof new_data['links'] !== undefined && new_data['links'] != null) {
for (var s = 0; s < links.length; s++) {
target = this.find_node_index(links[s]['target']);
source = this.find_node_index(links[s]['source']);
if (target!=-1 && source!=-1) {
this.input_links.push({
"target":target,
"source":source,
"depth":links[s]['depth'],
"weight":links[s]['weight']
})
}
else{
if (target == -1){
console.log("Missing target node: "+links[s]['target'])
}
if (source == -1){
console.log("Missing source node : "+links[s]['source'])
}
}
}
}
this.displayNodes();
this.displayLinks();
this.force.start();
}
this.setCenter = function(d) {
if(d.id == this.input_nodes[0]['id'] && this.video_switch == true) {
get_graph() ;
}
else
document.location.href = document.location.href.split("inevent_portal")[0] + "inevent_portal/hyperevent/" + d.id;
}
this.find_node_index = function(node_id) {
for(var i=0; i < this.input_nodes.length; i++){
if(parseInt(this.input_nodes[i].id) == parseInt(node_id)) {
return i;
}
}
return -1;
}
this.addNodeIfUnique = function(candidate_node) {
for(var i=0; i < this.input_nodes.length; i++){
if(parseInt(this.input_nodes[i].id) == parseInt(candidate_node.id)) {
return ;
}
}
this.input_nodes.push(candidate_node) ;
}
this.mouseover = function(d, display_class, title_id){
zoom_in(d, "node" + d.id, "rect" + d.id, this.graph_left, this.graph_width, this.graph_top, this.graph_height, display_class);
display_title(d.title, "video_title" + d.id);
}
this.displayNodes = function() {
var _this = this ;
this.node = this.svg.selectAll(".node").data(this.input_nodes);
this.nodeEnter = this.node.enter().append("svg:g");
this.nodeEnter.attr("id", function(d) { return "node" + d.id;})
.attr("class", "node")
.on("click", function(d) {d3.event.stopPropagation(); _this.mouseover(d,"word_cloud_" + d.id,"video_title" + d.id); })
.call(this.force.drag); //SD/ Enable Drag&Drop
this.defs = this.nodeEnter.append("defs") ;
this.defs.append("rect")
.attr("id", function(d) { return "rect_node" + d.id})
.style("stroke-width", 4)
.style("fill", "none") // Make the nodes hollow looking
.attr("height", this.small_rect[1] - 10)
.attr("width", this.small_rect[0])
.style("stroke", function(d) {
//SD/ Color in black first node only
if(d.depth < 1) { return _this.color.black } else { return _this.color.grey }
})
.attr("rx", "5")
.attr('x', -this.small_rect[0] / 2)
.attr('y', -this.small_rect[1] / 2 + 5);
this.defs.append("svg:clipPath")
.attr("id", function(d) { return "clip" + d.id})
.append("use")
.attr("xlink:href", function(d) { return "#rect_node" + d.id});
this.nodeEnter.append("use")
.attr("xlink:href", function(d) { return "#rect_node" + d.id});
this.nodeEnter.append("image")
.attr("id", function(d) { return "image" + d.id})
.attr("xlink:href", function(d) { return d.snapshot_url })
.attr("height", _this.small_rect[1])
.attr("width", _this.small_rect[0])
.attr("class", "graph_images")
.attr('x', -this.small_rect[0] / 2)
.attr('y', -this.small_rect[1] / 2)
.attr("clip-path", function(d) { return "url(#"+"clip" + d.id +")"}) ;
//SD/ Define window
var rect = this.nodeEnter.append("rect")
.attr("id", function(d) { return "rect"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", this.big_rect[1])
.attr("width", this.big_rect[0])
.style("stroke", function(d) {
//SD/ Color in black first node only
if(d.depth < 1) { return _this.color.black } else { return _this.color.grey }
})
.style("fill","white") // Make the nodes hollow looking
.style("stroke-width", 2) // Give the node strokes some thickness
.attr('x', -this.big_rect[0]/2)
.attr('y', -this.big_rect[1]/2)
.attr('rx', 5)
.attr('ry', 5);
this.nodeEnter.append("image")
.attr("id", function(d) { return "cloud"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("xlink:href", function(d) { return d.snapshot_url })
.attr('x', -this.image_rect[0]/2)
.attr('y', -this.image_rect[1]/2)
.attr("height", this.image_rect[1])
.attr("width", this.image_rect[0]);
//.attr("clip-path", function(d) { return "url(#"+"clip_zoom"+d.id +")"});
/* Adapt node display depending on Web Browser */
var browser = window.navigator.userAgent.toLowerCase();
var version = window.navigator.appVersion;
_this = this ;
if ( (browser.indexOf("firefox")>-1) || ( (browser.indexOf("safari")>-1) && version>6)) {
//Explorers who does not support the use of foreign objects in SVG
this.nodeEnter.append('svg:foreignObject')
.attr("id", function(d) { return "centralize"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 30)
.attr("width", 80)
.attr("color","#707070")
//.attr("requiredExtensions","http://www.w3.org/1999/xhtml")
.attr('x', -this.image_rect[0]/2)
.attr('y', 90)
.on("click", function(d) { d3.event.stopPropagation(); _this.setCenter(d);})
.append("xhtml:body")
.attr("xmlns","http://www.w3.org/1999/xhtml")
.html('<button class="btn btn-small" value="btn" type="button"> <i class="icon-play"></i> Play</button> ');
this.nodeEnter.append('svg:foreignObject')
.attr("id", function(d) { return "close"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 30)
.attr("width", 80)
//.attr("requiredExtensions","http://www.w3.org/1999/xhtml")
.attr('x', -this.image_rect[0]/2 + 180)
.attr('y', 90)
.on("click", function(d) {d3.event.stopPropagation(); mouseout(d,"word_cloud_"+d.id); return false})
.append("xhtml:body")
.attr("xmlns","http://www.w3.org/1999/xhtml")
.html('<button class="btn btn-small" value="btn" type="button" ><b>[x]</b> Close</button>');
}
else {
this.nodeEnter.append('image')
.attr("id", function(d) { return "centralize"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 33)
.attr("width",69)
.attr('x', -this.image_rect[0]/2)
.attr('y', 90)
.on("click", function(d) { d3.event.stopPropagation(); _this.setCenter(d);})
.attr("xlink:href", function(d) { return "/static/inevent/images/play_button.png"});
this.nodeEnter.append('image')
.attr("id", function(d) { return "close"+d.id})
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("height", 33)
.attr("width", 79)
.attr('x', -this.image_rect[0]/2 + 180)
.attr('y', 90)
.on("click", function(d) {d3.event.stopPropagation(); mouseout(d,"word_cloud_"+d.id); return false})
.attr("xlink:href", function(d) { return "/static/inevent/images/close_button.png"});
}
//SD/ Define title
node_text = this.nodeEnter.append('text')
.attr("id", function(d) { return "video_title"+d.id})
.attr("height", 60)
.attr("class", function(d) { return "word_cloud"+ " " + "word_cloud_"+d.id})
.attr("fill","#707070")
.attr("width", this.image_rect[0])
.attr('x', function(d) { return -_this.image_rect[0]/2+10})
.attr('y', function(d) { return -_this.big_rect[1]/2 + 10})
.text("")
//SD/TDOD : What is the purpose of this code ?
this.nodeEnter.append("title")
.text(function(d) { return d.title; });
this.node.exit().remove();
}
this.displayLinks = function() {
_this = this ;
this.link = this.svg.selectAll(".link").data(this.input_links);
this.linkEnter=this.link.enter().append("line")
.attr("class", "link")
.style("stroke-width",function(d) {return d.weight})
.style("stroke", function(d) {
//SD/ Color in black links of first node only
if(d.depth < 2) { return _this.color.black } else { return _this.color.grey }
})
.attr("x1", function(d) { return d.source.x})
.attr("y1", function(d) { return d.source.y})
.attr("x2", function(d) { return d.target.x})
.attr("y2", function(d) { return d.target.y});
this.link.exit().remove();
//SD/ Push lines to background
this.svg.selectAll(".link").moveToBack();
}
this.boundedTick = function() {
node = this.svg.selectAll(".node");
link = this.svg.selectAll(".link");
var g = this;
node.attr("cx", function(d) { return d.x; })
node.attr("cy", function(d) { return d.y; })
// node.attr("cx", function(d) { return d.x = Math.max(g.small_rect[0], Math.min(g.graph_width - g.small_rect[0], d.x)); })
// node.attr("cy", function(d) { return d.y = Math.max(g.small_rect[1] - 10, Math.min(g.graph_height - g.small_rect[1] + 10, d.y)); })
link.attr("x1", function(d) { return d.source.x; })
.attr("y1", function(d) { return d.source.y; })
.attr("x2", function(d) { return d.target.x; })
.attr("y2", function(d) { return d.target.y; });
node.attr("transform", function(d) {
return "translate(" + d.x + "," + d.y + ")";
});
}
this.getExcludedEvent = function() {
return this.excluded ;
}
this.addExclusion = function(event_id) {
if(typeof event_id != undefined)
this.excluded.push(event_id) ;
}
this.setWidth = function(new_width) {
this.width = new_width ;
this.graph_width = this.width - this.margin.left - this.margin.right ;
this.svg.attr("width", this.graph_width) ;
this.force.start();
}
//SD/ FROM HERE queue management where we could use a jQuery Queue Object (http://api.jquery.com/jquery.queue/)
//SD/ Enqueue if not excluded
this.enQueue = function(nodes) {
if(typeof nodes !== undefined && nodes != null) {
for(var i = 0 ; i < nodes.length ; i++)
this.checkAndQueue(nodes[i]) ;
}
}
this.checkAndQueue = function(node) {
for(var i = 0 ; i < this.excluded.length ; i++)
if(node['id'] == this.excluded[i])
return ;
this.queue.push(node) ;
}
this.deQueue = function() {
this.queue.splice(0, 1) ;
}
this.firstQueue = function() {
return [this.queue[0]] ;
}
this.firstQueueID = function() {
if (this.queue.length > 0)
return this.queue[0]['id'] ;
}
this.sizeQueue = function() {
return this.queue.length ;
}
//SD/ END FROM HERE queue management
}
graph = new Graph();
/*SD/ ==========================================================================
Function called to display graph and get data
==============================================================================*/
function display_graph_error(error) {
graph_data_fetched = false;
$('#graph_button').html("Show as List")
$('#graph').html('<div class="alert alert-error" style ="margin-top:100px;position:relative;margin-bottom:100px"> Unable to load graph. Please try again later.');
$('#graph_button').prop('disabled', false);
}
//SD/ Initiate the first graph and call dynamically next data
function display_graph_head(data, video_switch, max_neighbours, max_depth, max_size) {
graph_data_fetched = true;
video_switch = typeof video_switch !== 'undefined' ? video_switch : false;
max_neighbours = typeof max_neighbours !== 'undefined' ? max_neighbours : 5;
max_depth = typeof max_depth !== 'undefined' ? max_depth : 2;
max_size = typeof max_size !== 'undefined' ? max_size : 100;
//SD/ Create first graph without any data
$('#graph').html("");
position = $('#graph').position();
graph.loadGraph(data, "graph", max_size, max_depth, max_neighbours, $("#graph_container").width(), 700, position['top'], position['left'], video_switch);
//SD/ Prepare queue for nodes
data[0]['depth'] = 0 ;
graph.enQueue(data) ;
//SD/ Get first neighbours
var first = graph.firstQueue() ;
params = {'event_id': graph.firstQueueID(), 'count': 1, 'depth': 1, 'num_of_similar': graph.max_neighbours, 'error_callback': display_graph_error} ;
Dajaxice.inevent.get_graph_neighbours(
function(data){display_graph(data, display_graph);}, params) ;
//$('#graph_button').prop('disabled', false);
}
//SD/ update graph with children data
function display_graph(data, callback) {
if(data['depth'] <= graph.max_depth) //SD/ Check for depth
{
var rest = graph.max_size - graph.input_nodes.length ;
var cuted = false ;
graph.addExclusion(data['caller_id']) ;
if(typeof data['nodes'] !== undefined && data['nodes'] != null) {
//SD/ Remove some neighbours if nodes limit reached
if(data['nodes'].length > rest) {
data['nodes'].splice(rest , data['nodes'].length - rest) ;
cuted = true ;
}
//SD/ Enqueue neighbours
graph.enQueue(data['nodes']) ;
}
//SD/ Graph node and prepare its links for next neighbours
//$('#graph').html("");
graph.updateGraph({'nodes': data['nodes'], 'caller_id': data['caller_id'], 'links': data['links']}) ;
//SD/ Dequeue first node
graph.deQueue() ;
//SD/ Check exclusion for next node
if(graph.sizeQueue() > 0 && !cuted) {
var first = graph.firstQueue() ;
if(callback) {
params = {'event_id': first[0]['id'], 'count': data['count'] + 1, 'depth': first[0]['depth'] + 1, 'num_of_similar': graph.max_neighbours, 'error_callback': display_graph_error} ;
callback(
Dajaxice.inevent.get_graph_neighbours(function(data){display_graph(data, display_graph); }, params)
) ;
}
}
else
console.log("End of queue after exclusion with " + graph.input_nodes.length + " nodes") ;
}
}
//SD/ If windows is resized
$( window ).resize(function() {
//SD/ adapt graph size with container width
graph.setWidth($("#graph_container").width()) ;
});
| resize only when graph initialized
| static/inevent/scripts/graph.js | resize only when graph initialized | <ide><path>tatic/inevent/scripts/graph.js
<ide>
<ide> this.setWidth = function(new_width) {
<ide> this.width = new_width ;
<del> this.graph_width = this.width - this.margin.left - this.margin.right ;
<del>
<del> this.svg.attr("width", this.graph_width) ;
<del> this.force.start();
<add> if (this.margin!=undefined) {
<add> this.graph_width = this.width - this.margin.left - this.margin.right ;
<add>
<add> this.svg.attr("width", this.graph_width) ;
<add> this.force.start();
<add> }
<add>
<ide> }
<ide>
<ide> //SD/ FROM HERE queue management where we could use a jQuery Queue Object (http://api.jquery.com/jquery.queue/) |
|
Java | mit | b5737ff0a6ac5ce4dfd115a3a04a2fa9d34b2eee | 0 | DMDirc/DMDirc,csmith/DMDirc,ShaneMcC/DMDirc-Client,ShaneMcC/DMDirc-Client,greboid/DMDirc,greboid/DMDirc,csmith/DMDirc,DMDirc/DMDirc,csmith/DMDirc,ShaneMcC/DMDirc-Client,greboid/DMDirc,DMDirc/DMDirc,csmith/DMDirc,greboid/DMDirc,DMDirc/DMDirc,ShaneMcC/DMDirc-Client | /*
* Copyright (c) 2006-2007 Chris Smith, Shane Mc Cormack, Gregory Holmes
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.ui.swing.dialogs.sslcertificate;
import com.dmdirc.harness.ui.ClassFinder;
import com.dmdirc.harness.ui.TestSSLCertificateDialogModel;
import com.dmdirc.harness.ui.UIClassTestRunner;
import com.dmdirc.harness.ui.UITestIface;
import com.dmdirc.ui.IconManager;
import com.dmdirc.ui.swing.UIUtilities;
import java.awt.Component;
import java.util.Arrays;
import javax.swing.Icon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JScrollPane;
import javax.swing.border.TitledBorder;
import org.fest.swing.core.EventMode;
import org.fest.swing.driver.BasicJListCellReader;
import org.fest.swing.fixture.DialogFixture;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
@RunWith(UIClassTestRunner.class)
public class SSLCertificateDialogTest implements UITestIface {
private DialogFixture window;
@Before
public void setUp() {
UIUtilities.initUISettings();
}
@After
public void tearDown() {
if (window != null) {
window.cleanUp();
}
}
@Test
public void testTicksAndCrosses() {
setupWindow();
assertTrue(Arrays.equals(new String[]{
"first cert",
"second cert",
"invalid cert",
"trusted cert",
"invalid+trusted"
}, window.list().contents()));
assertTrue(Arrays.equals(new String[]{
"nothing",
"nothing",
"cross",
"tick",
"cross"
},window.list().cellReader(new CertificateListCellReader()).contents()));
}
@Test
public void testSelection() throws InterruptedException {
setupWindow();
window.list().requireSelection("first cert");
//Thread.sleep(10000);
for (String cert : window.list().contents()) {
window.list().selectItem(cert).requireSelection(cert);
assertEquals("Information for " + cert, ((TitledBorder) window
.scrollPane(new ClassFinder<JScrollPane>(CertificateInfoPanel.class, null))
.target.getBorder()).getTitle());
}
}
protected void setupWindow() {
window = new DialogFixture(new SSLCertificateDialog(null,
new TestSSLCertificateDialogModel()));
window.robot.settings().eventMode(EventMode.AWT);
window.show();
}
public static junit.framework.Test suite() {
return new junit.framework.JUnit4TestAdapter(SSLCertificateDialogTest.class);
}
private static class CertificateListCellReader extends BasicJListCellReader {
public String valueAt(JList arg0, int arg1) {
final Component c = cellRendererComponent(arg0, arg1);
final Icon target = ((JLabel) c).getIcon();
for (String icon : new String[]{"tick", "cross", "nothing"}) {
if (target == IconManager.getIconManager().getIcon(icon)) {
return icon;
}
}
return "?";
}
}
}
| test/com/dmdirc/ui/swing/dialogs/sslcertificate/SSLCertificateDialogTest.java | /*
* Copyright (c) 2006-2007 Chris Smith, Shane Mc Cormack, Gregory Holmes
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.ui.swing.dialogs.sslcertificate;
import com.dmdirc.harness.ui.ClassFinder;
import com.dmdirc.harness.ui.TestSSLCertificateDialogModel;
import com.dmdirc.harness.ui.UIClassTestRunner;
import com.dmdirc.harness.ui.UITestIface;
import com.dmdirc.ui.IconManager;
import com.dmdirc.ui.swing.UIUtilities;
import java.awt.Component;
import java.util.Arrays;
import javax.swing.Icon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JScrollPane;
import javax.swing.border.TitledBorder;
import org.fest.swing.core.EventMode;
import org.fest.swing.driver.BasicJListCellReader;
import org.fest.swing.fixture.DialogFixture;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
@RunWith(UIClassTestRunner.class)
public class SSLCertificateDialogTest implements UITestIface {
private DialogFixture window;
@Before
public void setUp() {
UIUtilities.initUISettings();
}
@After
public void tearDown() {
if (window != null) {
window.cleanUp();
}
}
@Test
public void testTicksAndCrosses() {
setupWindow();
assertTrue(Arrays.equals(new String[]{
"first cert",
"second cert",
"invalid cert",
"trusted cert",
"invalid+trusted"
}, window.list().contents()));
assertTrue(Arrays.equals(new String[]{
"nothing",
"nothing",
"cross",
"tick",
"cross"
},window.list().cellReader(new CertificateListCellReader()).contents()));
}
@Test @Ignore
public void testSelection() throws InterruptedException {
setupWindow();
window.list().requireSelection("first cert");
Thread.sleep(10000);
for (String cert : window.list().contents()) {
window.list().selectItem(cert).requireSelection(cert);
assertEquals("Information for " + cert, ((TitledBorder) window
.scrollPane(new ClassFinder<JScrollPane>(CertificateInfoPanel.class, null))
.target.getBorder()).getTitle());
}
}
protected void setupWindow() {
window = new DialogFixture(new SSLCertificateDialog(null,
new TestSSLCertificateDialogModel()));
window.robot.settings().eventMode(EventMode.AWT);
window.show();
}
public static junit.framework.Test suite() {
return new junit.framework.JUnit4TestAdapter(SSLCertificateDialogTest.class);
}
private static class CertificateListCellReader extends BasicJListCellReader {
public String valueAt(JList arg0, int arg1) {
final Component c = cellRendererComponent(arg0, arg1);
final Icon target = ((JLabel) c).getIcon();
for (String icon : new String[]{"tick", "cross", "nothing"}) {
if (target == IconManager.getIconManager().getIcon(icon)) {
return icon;
}
}
return "?";
}
}
}
| enabled unit test on SSL certificate dialog
git-svn-id: 50f83ef66c13f323b544ac924010c921a9f4a0f7@4815 00569f92-eb28-0410-84fd-f71c24880f43
| test/com/dmdirc/ui/swing/dialogs/sslcertificate/SSLCertificateDialogTest.java | enabled unit test on SSL certificate dialog | <ide><path>est/com/dmdirc/ui/swing/dialogs/sslcertificate/SSLCertificateDialogTest.java
<ide> },window.list().cellReader(new CertificateListCellReader()).contents()));
<ide> }
<ide>
<del> @Test @Ignore
<add> @Test
<ide> public void testSelection() throws InterruptedException {
<ide> setupWindow();
<ide>
<ide> window.list().requireSelection("first cert");
<ide>
<del> Thread.sleep(10000);
<add> //Thread.sleep(10000);
<ide>
<ide> for (String cert : window.list().contents()) {
<ide> window.list().selectItem(cert).requireSelection(cert); |
|
JavaScript | apache-2.0 | a94a8572364faa41ca884090fb9c88956dec0e98 | 0 | jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2 | /**
* @license
* Copyright 2020 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
foam.CLASS({
package: 'foam.nanos.crunch.ui',
name: 'CapabilityWizardlet',
extends: 'foam.u2.wizard.BaseWizardlet',
implements: [
'foam.mlang.Expressions'
],
imports: [
'crunchController',
'localeDAO'
],
properties: [
// Properties specific to CapabilityWizardSection
{
name: 'capability'
},
{
name: 'ucj'
},
// Properties for WizardSection interface
{
name: 'of',
class: 'Class',
expression: function(capability) {
if ( ! capability || ! capability.of ) return null;
return capability.of;
}
},
{
name: 'data',
flags: ['web'],
factory: function() {
if ( ! this.of ) return null;
var ret = this.of.getAxiomByName('capability') ?
this.of.create({ capability: this.capability }, this) :
this.of.create({}, this);
if ( this.ucj === null ) return ret;
ret = Object.assign(ret, this.ucj.data);
return ret;
}
},
{
name: 'title',
class: 'String',
}
],
methods: [
{
name: 'save',
code: function() {
return this.crunchController && this.crunchController.save(this);
}
}
]
});
| src/foam/nanos/crunch/ui/CapabilityWizardlet.js | /**
* @license
* Copyright 2020 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
foam.CLASS({
package: 'foam.nanos.crunch.ui',
name: 'CapabilityWizardlet',
extends: 'foam.u2.wizard.BaseWizardlet',
implements: [
'foam.mlang.Expressions'
],
imports: [
'crunchController',
'localeDAO'
],
properties: [
// Properties specific to CapabilityWizardSection
{
name: 'capability',
postSet: function() {
var self = this;
return this.localeDAO.where(
this.AND(
this.OR(
this.EQ(foam.i18n.Locale.LOCALE, foam.locale),
this.EQ(foam.i18n.Locale.LOCALE, foam.locale.substring(0,foam.locale.indexOf('-')))),
this.EQ(foam.i18n.Locale.ID, this.capability.id + '.name')))
.select().then(function(a){
let arr = a.array;
if ( arr.length > 0 ) {
let ea = arr[0];
self.title = ea.target;
} else
self.title = self.capability.name;
})
.catch(function() {
self.title = self.capability.name;
});
}
},
{
name: 'ucj'
},
// Properties for WizardSection interface
{
name: 'of',
class: 'Class',
expression: function(capability) {
if ( ! capability || ! capability.of ) return null;
return capability.of;
}
},
{
name: 'data',
flags: ['web'],
factory: function() {
if ( ! this.of ) return null;
var ret = this.of.getAxiomByName('capability') ?
this.of.create({ capability: this.capability }, this) :
this.of.create({}, this);
if ( this.ucj === null ) return ret;
ret = Object.assign(ret, this.ucj.data);
return ret;
}
},
{
name: 'title',
class: 'String',
}
],
methods: [
{
name: 'save',
code: function() {
return this.crunchController && this.crunchController.save(this);
}
}
]
});
| Remove manual translation from CapabilityWizardlet.
| src/foam/nanos/crunch/ui/CapabilityWizardlet.js | Remove manual translation from CapabilityWizardlet. | <ide><path>rc/foam/nanos/crunch/ui/CapabilityWizardlet.js
<ide> properties: [
<ide> // Properties specific to CapabilityWizardSection
<ide> {
<del> name: 'capability',
<del> postSet: function() {
<del> var self = this;
<del> return this.localeDAO.where(
<del> this.AND(
<del> this.OR(
<del> this.EQ(foam.i18n.Locale.LOCALE, foam.locale),
<del> this.EQ(foam.i18n.Locale.LOCALE, foam.locale.substring(0,foam.locale.indexOf('-')))),
<del> this.EQ(foam.i18n.Locale.ID, this.capability.id + '.name')))
<del> .select().then(function(a){
<del> let arr = a.array;
<del> if ( arr.length > 0 ) {
<del> let ea = arr[0];
<del> self.title = ea.target;
<del> } else
<del> self.title = self.capability.name;
<del> })
<del> .catch(function() {
<del> self.title = self.capability.name;
<del> });
<del> }
<add> name: 'capability'
<ide> },
<ide> {
<ide> name: 'ucj' |
|
Java | apache-2.0 | 03f898682283cd58244c072bc1b877a24f7358b1 | 0 | robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,gstevey/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,gstevey/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.platform.base;
import org.gradle.api.Incubating;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Declares the tasks to build a custom {@link org.gradle.platform.base.BinarySpec} binary.
*
* The following example demonstrates how to register multiple tasks for custom binary using a plugin with a
* {@link org.gradle.platform.base.BinaryTasks} annotation.
*
* <pre autoTested='true'>
* {@literal @}Managed interface SampleComponent extends ComponentSpec {}
* {@literal @}Managed interface SampleBinary extends BinarySpec {}
*
* apply plugin: MyCustomBinariesPlugin
*
* class MyCustomBinaryCreationTask extends DefaultTask {
* {@literal @}TaskAction void build() {
* //building the binary
* }
* }
*
* class MyCustomBinariesPlugin extends RuleSource {
* {@literal @}BinaryType
* void register(BinaryTypeBuilder<SampleBinary> builder) {}
*
* {@literal @}BinaryTasks
* void createBinaryTasks(ModelMap<Task> tasks, SampleBinary binary) {
* tasks.create("${binary.name}Task1", MyCustomBinaryCreationTask)
* tasks.create("${binary.name}Task2") {
* dependsOn "${binary.name}Task1"
* }
* }
* }
* </pre>
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Incubating
public @interface BinaryTasks {
}
| subprojects/platform-base/src/main/java/org/gradle/platform/base/BinaryTasks.java | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.platform.base;
import org.gradle.api.Incubating;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Declares the tasks to build a custom {@link org.gradle.platform.base.BinarySpec} binary.
*
* The following example demonstrates how to register multiple tasks for custom binary using a plugin with a
* {@link org.gradle.platform.base.BinaryTasks} annotation.
*
* <pre autoTested='true'>
* @Managed interface SampleComponent extends ComponentSpec {}
* @Managed interface SampleBinary extends BinarySpec {}
*
* apply plugin: MyCustomBinariesPlugin
*
* class MyCustomBinaryCreationTask extends DefaultTask {
* {@literal @}TaskAction void build() {
* //building the binary
* }
* }
*
* class MyCustomBinariesPlugin extends RuleSource {
* {@literal @}BinaryType
* void register(BinaryTypeBuilder<SampleBinary> builder) {}
*
* {@literal @}BinaryTasks
* void createBinaryTasks(ModelMap<Task> tasks, SampleBinary binary) {
* tasks.create("${binary.name}Task1", MyCustomBinaryCreationTask)
* tasks.create("${binary.name}Task2") {
* dependsOn "${binary.name}Task1"
* }
* }
* }
* </pre>
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Incubating
public @interface BinaryTasks {
}
| Fix javadoc markup
+review REVIEW-5783
| subprojects/platform-base/src/main/java/org/gradle/platform/base/BinaryTasks.java | Fix javadoc markup | <ide><path>ubprojects/platform-base/src/main/java/org/gradle/platform/base/BinaryTasks.java
<ide> * {@link org.gradle.platform.base.BinaryTasks} annotation.
<ide> *
<ide> * <pre autoTested='true'>
<del> * @Managed interface SampleComponent extends ComponentSpec {}
<del> * @Managed interface SampleBinary extends BinarySpec {}
<add> * {@literal @}Managed interface SampleComponent extends ComponentSpec {}
<add> * {@literal @}Managed interface SampleBinary extends BinarySpec {}
<ide> *
<ide> * apply plugin: MyCustomBinariesPlugin
<ide> * |
|
Java | mit | f0aaefdaa7c480f67cf8ac9bbc82546815de7de6 | 0 | AgriCraft/AgriCraft,CodesCubesAndCrashes/AgriCraft,InfinityRaider/AgriCraft | package com.infinityraider.agricraft.items;
import com.agricraft.agricore.core.AgriCore;
import com.infinityraider.agricraft.api.v1.AgriApi;
import com.infinityraider.agricraft.init.AgriBlocks;
import com.infinityraider.agricraft.items.tabs.AgriTabs;
import com.infinityraider.agricraft.reference.AgriCraftConfig;
import com.infinityraider.agricraft.tiles.TileEntityCrop;
import com.infinityraider.agricraft.utility.StackHelper;
import com.infinityraider.infinitylib.item.IItemWithModel;
import com.infinityraider.infinitylib.item.ItemBase;
import com.infinityraider.infinitylib.utility.IRecipeRegister;
import com.infinityraider.infinitylib.utility.WorldHelper;
import net.minecraft.block.SoundType;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.oredict.ShapedOreRecipe;
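/**
 * The crop sticks item. Right-clicking a block face places crop sticks on the
 * adjacent space, provided that space is empty and sits on a registered
 * AgriCraft soil. Sneak-placing consumes an extra item from the stack and
 * turns the freshly placed crop into a cross crop.
 */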
public class ItemCrop extends ItemBase implements IItemWithModel, IRecipeRegister {
public ItemCrop() {
super("crop_sticks");
this.setCreativeTab(AgriTabs.TAB_AGRICRAFT);
}
    // Overridden explicitly to make sure crop sticks are always allowed to edit blocks.
@Override
public boolean canItemEditBlocks() {
return true;
}
// This is called when you right click with this item in hand.
@Override
public EnumActionResult onItemUse(ItemStack stack, EntityPlayer player, World world, BlockPos pos, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ) {
// Skip if remote.
if (world.isRemote) {
return EnumActionResult.PASS;
}
// Calculate the target position.
final BlockPos cropPos = pos.offset(side);
// Test if placement is valid.
if (!world.isAirBlock(cropPos)) {
return EnumActionResult.FAIL;
}
// Test if soil is valid.
if (!AgriApi.getSoilRegistry().contains(world.getBlockState(cropPos.down()))) {
return EnumActionResult.FAIL;
}
// Set the block to a crop.
        final boolean success = world.setBlockState(cropPos, AgriBlocks.getInstance().CROP.getDefaultState());
// If there was trouble, abort.
if (!success) {
AgriCore.getCoreLogger().error("ItemCrop#onItemUse failed to create the BlockCrop!");
return EnumActionResult.FAIL;
}
// Remove the crop used from the stack.
StackHelper.decreaseStackSize(player, stack,1);
// Handle sneak placing of crosscrops.
if (player.isSneaking() && stack.stackSize > 0) {
WorldHelper
.getTile(world, cropPos, TileEntityCrop.class)
.ifPresent(c -> {
c.setCrossCrop(true);
StackHelper.decreaseStackSize(player, stack,1);
});
}
// Play placement sound.
SoundType type = Blocks.LEAVES.getSoundType();
world.playSound(null, (double) ((float) cropPos.getX() + 0.5F), (double) ((float) cropPos.getY() + 0.5F), (double) ((float) cropPos.getZ() + 0.5F), type.getPlaceSound(), SoundCategory.PLAYERS, (type.getVolume() + 1.0F) / 4.0F, type.getPitch() * 0.8F);
// Action was a success.
return EnumActionResult.SUCCESS;
}
@Override
public void registerRecipes() {
GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(this, AgriCraftConfig.cropsPerCraft), "ss", "ss", 's', "stickWood"));
}
}
| src/main/java/com/infinityraider/agricraft/items/ItemCrop.java | package com.infinityraider.agricraft.items;
import com.agricraft.agricore.core.AgriCore;
import com.infinityraider.agricraft.api.v1.AgriApi;
import com.infinityraider.agricraft.init.AgriBlocks;
import com.infinityraider.agricraft.items.tabs.AgriTabs;
import com.infinityraider.agricraft.reference.AgriCraftConfig;
import com.infinityraider.agricraft.tiles.TileEntityCrop;
import com.infinityraider.infinitylib.item.IItemWithModel;
import com.infinityraider.infinitylib.item.ItemBase;
import com.infinityraider.infinitylib.utility.IRecipeRegister;
import com.infinityraider.infinitylib.utility.WorldHelper;
import net.minecraft.block.SoundType;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.oredict.ShapedOreRecipe;
public class ItemCrop extends ItemBase implements IItemWithModel, IRecipeRegister {
public ItemCrop() {
super("crop_sticks");
this.setCreativeTab(AgriTabs.TAB_AGRICRAFT);
}
//I'm overriding this just to be sure
@Override
public boolean canItemEditBlocks() {
return true;
}
// This is called when you right click with this item in hand.
@Override
public EnumActionResult onItemUse(ItemStack stack, EntityPlayer player, World world, BlockPos pos, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ) {
// Skip if remote.
if (world.isRemote) {
return EnumActionResult.PASS;
}
// Calculate the target position.
final BlockPos cropPos = pos.offset(side);
// Test if placement is valid.
if (!world.isAirBlock(cropPos)) {
return EnumActionResult.FAIL;
}
// Test if soil is valid.
if (!AgriApi.getSoilRegistry().contains(world.getBlockState(cropPos.down()))) {
return EnumActionResult.FAIL;
}
// Set the block to a crop.
final Boolean success = world.setBlockState(cropPos, AgriBlocks.getInstance().CROP.getDefaultState());
// If there was trouble, abort.
if (!success) {
AgriCore.getCoreLogger().error("ItemCrop#onItemUse failed to create the BlockCrop!");
return EnumActionResult.FAIL;
}
// Remove the crop used from the stack.
stack.stackSize = player.capabilities.isCreativeMode ? stack.stackSize : stack.stackSize - 1;
// Handle sneak placing of crosscrops.
if (player.isSneaking() && stack.stackSize > 0) {
WorldHelper
.getTile(world, cropPos, TileEntityCrop.class)
.ifPresent(c -> {
c.setCrossCrop(true);
stack.stackSize = player.capabilities.isCreativeMode ? stack.stackSize : stack.stackSize - 1;
});
}
// Play placement sound.
SoundType type = Blocks.LEAVES.getSoundType();
world.playSound(null, (double) ((float) cropPos.getX() + 0.5F), (double) ((float) cropPos.getY() + 0.5F), (double) ((float) cropPos.getZ() + 0.5F), type.getPlaceSound(), SoundCategory.PLAYERS, (type.getVolume() + 1.0F) / 4.0F, type.getPitch() * 0.8F);
// Action was a success.
return EnumActionResult.SUCCESS;
}
@Override
public void registerRecipes() {
GameRegistry.addRecipe(new ShapedOreRecipe(new ItemStack(this, AgriCraftConfig.cropsPerCraft), "ss", "ss", 's', "stickWood"));
}
}
| Make ItemCrop#onItemUse use the StackHelper
The helper method is cleaner and makes sure the checks are done right.
It also helps prevent NPEs from having a null player, like if ItemCrop
is used by a machine.
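The StackHelper implementation itself is not part of this record, so the following is only a rough sketch of the behaviour the message describes (skip the decrement for creative-mode players and tolerate a null player, for example when a machine uses the item); the actual com.infinityraider.agricraft.utility.StackHelper may differ:
package com.infinityraider.agricraft.utility;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
// Hypothetical sketch only, not the real AgriCraft implementation.
public class StackHelper {
    /**
     * Decreases the stack size by the given amount unless the player is in creative mode.
     * A null player (e.g. a machine using the item) is treated as a regular, non-creative user.
     */
    public static void decreaseStackSize(EntityPlayer player, ItemStack stack, int amount) {
        if (player == null || !player.capabilities.isCreativeMode) {
            stack.stackSize -= amount;
        }
    }
}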
| src/main/java/com/infinityraider/agricraft/items/ItemCrop.java | Make ItemCrop#onItemUse use the StackHelper | <ide><path>rc/main/java/com/infinityraider/agricraft/items/ItemCrop.java
<ide> import com.infinityraider.agricraft.items.tabs.AgriTabs;
<ide> import com.infinityraider.agricraft.reference.AgriCraftConfig;
<ide> import com.infinityraider.agricraft.tiles.TileEntityCrop;
<add>import com.infinityraider.agricraft.utility.StackHelper;
<ide> import com.infinityraider.infinitylib.item.IItemWithModel;
<ide> import com.infinityraider.infinitylib.item.ItemBase;
<ide> import com.infinityraider.infinitylib.utility.IRecipeRegister;
<ide> }
<ide>
<ide> // Remove the crop used from the stack.
<del> stack.stackSize = player.capabilities.isCreativeMode ? stack.stackSize : stack.stackSize - 1;
<add> StackHelper.decreaseStackSize(player, stack,1);
<ide>
<ide> // Handle sneak placing of crosscrops.
<ide> if (player.isSneaking() && stack.stackSize > 0) {
<ide> .getTile(world, cropPos, TileEntityCrop.class)
<ide> .ifPresent(c -> {
<ide> c.setCrossCrop(true);
<del> stack.stackSize = player.capabilities.isCreativeMode ? stack.stackSize : stack.stackSize - 1;
<add> StackHelper.decreaseStackSize(player, stack,1);
<ide> });
<ide> }
<ide> |
|
Java | apache-2.0 | 05f951de5b4a3b6c04a87c05512c2512a26a35fe | 0 | pminutillo/pentaho-kettle,nicoben/pentaho-kettle,matrix-stone/pentaho-kettle,ma459006574/pentaho-kettle,akhayrutdinov/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,roboguy/pentaho-kettle,stepanovdg/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,lgrill-pentaho/pentaho-kettle,wseyler/pentaho-kettle,jbrant/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,flbrino/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,yshakhau/pentaho-kettle,birdtsai/pentaho-kettle,akhayrutdinov/pentaho-kettle,airy-ict/pentaho-kettle,ddiroma/pentaho-kettle,rmansoor/pentaho-kettle,andrei-viaryshka/pentaho-kettle,stevewillcock/pentaho-kettle,ViswesvarSekar/pentaho-kettle,denisprotopopov/pentaho-kettle,mbatchelor/pentaho-kettle,EcoleKeine/pentaho-kettle,Advent51/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,matrix-stone/pentaho-kettle,nanata1115/pentaho-kettle,bmorrise/pentaho-kettle,denisprotopopov/pentaho-kettle,drndos/pentaho-kettle,graimundo/pentaho-kettle,hudak/pentaho-kettle,drndos/pentaho-kettle,mkambol/pentaho-kettle,skofra0/pentaho-kettle,ma459006574/pentaho-kettle,hudak/pentaho-kettle,roboguy/pentaho-kettle,EcoleKeine/pentaho-kettle,gretchiemoran/pentaho-kettle,gretchiemoran/pentaho-kettle,HiromuHota/pentaho-kettle,mbatchelor/pentaho-kettle,Advent51/pentaho-kettle,ma459006574/pentaho-kettle,marcoslarsen/pentaho-kettle,ViswesvarSekar/pentaho-kettle,aminmkhan/pentaho-kettle,hudak/pentaho-kettle,pavel-sakun/pentaho-kettle,pentaho/pentaho-kettle,CapeSepias/pentaho-kettle,bmorrise/pentaho-kettle,flbrino/pentaho-kettle,pentaho/pentaho-kettle,e-cuellar/pentaho-kettle,mbatchelor/pentaho-kettle,skofra0/pentaho-kettle,airy-ict/pentaho-kettle,brosander/pentaho-kettle,matthewtckr/pentaho-kettle,mkambol/pentaho-kettle,YuryBY/pentaho-kettle,graimundo/pentaho-kettle,GauravAshara/pentaho-kettle,mattyb149/pentaho-kettle,tkafalas/pentaho-kettle,CapeSepias/pentaho-kettle,sajeetharan/pentaho-kettle,mattyb149/pentaho-kettle,eayoungs/pentaho-kettle,airy-ict/pentaho-kettle,codek/pentaho-kettle,mdamour1976/pentaho-kettle,DFieldFL/pentaho-kettle,kurtwalker/pentaho-kettle,ivanpogodin/pentaho-kettle,alina-ipatina/pentaho-kettle,alina-ipatina/pentaho-kettle,tkafalas/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,hudak/pentaho-kettle,ivanpogodin/pentaho-kettle,cjsonger/pentaho-kettle,codek/pentaho-kettle,gretchiemoran/pentaho-kettle,marcoslarsen/pentaho-kettle,HiromuHota/pentaho-kettle,pedrofvteixeira/pentaho-kettle,eayoungs/pentaho-kettle,nantunes/pentaho-kettle,gretchiemoran/pentaho-kettle,nicoben/pentaho-kettle,wseyler/pentaho-kettle,ddiroma/pentaho-kettle,ccaspanello/pentaho-kettle,lgrill-pentaho/pentaho-kettle,rmansoor/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,EcoleKeine/pentaho-kettle,ccaspanello/pentaho-kettle,cjsonger/pentaho-kettle,dkincade/pentaho-kettle,dkincade/pentaho-kettle,matthewtckr/pentaho-kettle,flbrino/pentaho-kettle,pymjer/pentaho-kettle,HiromuHota/pentaho-kettle,alina-ipatina/pentaho-kettle,ddiroma/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,emartin-pentaho/pentaho-kettle,mdamour1976/pentaho-kettle,brosander/pentaho-kettle,matthewtckr/pentaho-kettle,dkincade/pentaho-kettle,pymjer/pentaho-kettle,rmansoor/pentaho-kettle,stepanovdg/pentaho-kettle,e-cuellar/pentaho-kettle,lgrill-pentaho/pentaho-kettle,SergeyTravin/pentaho-kettle,CapeSepias/pentaho-kettle,CapeSepias/pentaho-kettle,stevewillcock/pentaho-kettle,MikhailHubanau/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,yshakhau/pentaho-kettle,pminutillo/pent
aho-kettle,rfellows/pentaho-kettle,aminmkhan/pentaho-kettle,nanata1115/pentaho-kettle,rmansoor/pentaho-kettle,ccaspanello/pentaho-kettle,matrix-stone/pentaho-kettle,DFieldFL/pentaho-kettle,stepanovdg/pentaho-kettle,EcoleKeine/pentaho-kettle,roboguy/pentaho-kettle,pedrofvteixeira/pentaho-kettle,skofra0/pentaho-kettle,codek/pentaho-kettle,tmcsantos/pentaho-kettle,sajeetharan/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,sajeetharan/pentaho-kettle,ma459006574/pentaho-kettle,emartin-pentaho/pentaho-kettle,dkincade/pentaho-kettle,flbrino/pentaho-kettle,akhayrutdinov/pentaho-kettle,codek/pentaho-kettle,marcoslarsen/pentaho-kettle,drndos/pentaho-kettle,aminmkhan/pentaho-kettle,pminutillo/pentaho-kettle,pminutillo/pentaho-kettle,zlcnju/kettle,AlexanderBuloichik/pentaho-kettle,GauravAshara/pentaho-kettle,mattyb149/pentaho-kettle,roboguy/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,ddiroma/pentaho-kettle,alina-ipatina/pentaho-kettle,pedrofvteixeira/pentaho-kettle,GauravAshara/pentaho-kettle,pentaho/pentaho-kettle,marcoslarsen/pentaho-kettle,pymjer/pentaho-kettle,matrix-stone/pentaho-kettle,akhayrutdinov/pentaho-kettle,cjsonger/pentaho-kettle,ViswesvarSekar/pentaho-kettle,ivanpogodin/pentaho-kettle,kurtwalker/pentaho-kettle,zlcnju/kettle,graimundo/pentaho-kettle,mdamour1976/pentaho-kettle,wseyler/pentaho-kettle,cjsonger/pentaho-kettle,pedrofvteixeira/pentaho-kettle,lgrill-pentaho/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,jbrant/pentaho-kettle,pavel-sakun/pentaho-kettle,nanata1115/pentaho-kettle,HiromuHota/pentaho-kettle,eayoungs/pentaho-kettle,e-cuellar/pentaho-kettle,zlcnju/kettle,rfellows/pentaho-kettle,zlcnju/kettle,pavel-sakun/pentaho-kettle,andrei-viaryshka/pentaho-kettle,mbatchelor/pentaho-kettle,bmorrise/pentaho-kettle,yshakhau/pentaho-kettle,matthewtckr/pentaho-kettle,GauravAshara/pentaho-kettle,tkafalas/pentaho-kettle,wseyler/pentaho-kettle,nantunes/pentaho-kettle,kurtwalker/pentaho-kettle,tkafalas/pentaho-kettle,bmorrise/pentaho-kettle,kurtwalker/pentaho-kettle,nicoben/pentaho-kettle,mattyb149/pentaho-kettle,mkambol/pentaho-kettle,stevewillcock/pentaho-kettle,SergeyTravin/pentaho-kettle,jbrant/pentaho-kettle,tmcsantos/pentaho-kettle,airy-ict/pentaho-kettle,birdtsai/pentaho-kettle,rfellows/pentaho-kettle,nanata1115/pentaho-kettle,graimundo/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,emartin-pentaho/pentaho-kettle,denisprotopopov/pentaho-kettle,skofra0/pentaho-kettle,birdtsai/pentaho-kettle,stevewillcock/pentaho-kettle,pavel-sakun/pentaho-kettle,birdtsai/pentaho-kettle,MikhailHubanau/pentaho-kettle,YuryBY/pentaho-kettle,ccaspanello/pentaho-kettle,aminmkhan/pentaho-kettle,jbrant/pentaho-kettle,Advent51/pentaho-kettle,stepanovdg/pentaho-kettle,drndos/pentaho-kettle,nicoben/pentaho-kettle,tmcsantos/pentaho-kettle,brosander/pentaho-kettle,nantunes/pentaho-kettle,yshakhau/pentaho-kettle,YuryBY/pentaho-kettle,DFieldFL/pentaho-kettle,sajeetharan/pentaho-kettle,emartin-pentaho/pentaho-kettle,nantunes/pentaho-kettle,DFieldFL/pentaho-kettle,SergeyTravin/pentaho-kettle,ViswesvarSekar/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,mdamour1976/pentaho-kettle,e-cuellar/pentaho-kettle,denisprotopopov/pentaho-kettle,MikhailHubanau/pentaho-kettle,eayoungs/pentaho-kettle,pymjer/pentaho-kettle,mkambol/pentaho-kettle,tmcsantos/pentaho-kettle,Advent51/pentaho-kettle,SergeyTravin/pentaho-kettle,brosander/pentaho-kettle,YuryBY/pentaho-kettle,pentaho/pentaho-kettle,andrei-viaryshka/pentaho-kettle,ivanpogodin/pentaho-kettle | /**********************************************************************
** **
** This code belongs to the KETTLE project. **
** **
** Kettle, from version 2.2 on, is released into the public domain **
** under the Lesser GNU Public License (LGPL). **
** **
** For more details, please read the document LICENSE.txt, included **
** in this project **
** **
** http://www.kettle.be **
** [email protected] **
** **
**********************************************************************/
package org.pentaho.di.job;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.commons.vfs.FileName;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.DBCache;
import org.pentaho.di.core.EngineMetaInterface;
import org.pentaho.di.core.LastUsedFile;
import org.pentaho.di.core.NotePadMeta;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.changed.ChangedFlag;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.gui.GUIPositionInterface;
import org.pentaho.di.core.gui.OverwritePrompter;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.gui.UndoInterface;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.reflection.StringSearchResult;
import org.pentaho.di.core.reflection.StringSearcher;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.undo.TransAction;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.core.xml.XMLInterface;
import org.pentaho.di.job.entries.special.JobEntrySpecial;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceExportInterface;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.shared.SharedObjects;
import org.pentaho.di.trans.HasDatabasesInterface;
import org.pentaho.di.trans.HasSlaveServersInterface;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/**
* Defines a Job and provides methods to load, save, verify, etc.
*
* @author Matt
* @since 11-08-2003
*
*/
public class JobMeta extends ChangedFlag implements Cloneable, Comparable<JobMeta>, XMLInterface, UndoInterface,
HasDatabasesInterface,
VariableSpace, EngineMetaInterface,
ResourceExportInterface, HasSlaveServersInterface
{
public static final String XML_TAG = "job"; //$NON-NLS-1$
private static final String XML_TAG_SLAVESERVERS = "slaveservers"; //$NON-NLS-1$
public LogWriter log;
protected long id;
protected String name;
protected String description;
protected String extended_description;
protected String job_version;
protected int job_status;
protected String filename;
public List<JobEntryInterface> jobentries;
public List<JobEntryCopy> jobcopies;
public List<JobHopMeta> jobhops;
public List<NotePadMeta> notes;
public List<DatabaseMeta> databases;
private List<SlaveServer> slaveServers;
protected RepositoryDirectory directory;
protected String arguments[];
protected boolean changed_entries, changed_hops, changed_notes, changed_databases;
protected DatabaseMeta logconnection;
protected String logTable;
public DBCache dbcache;
protected List<TransAction> undo;
private VariableSpace variables = new Variables();
protected int max_undo;
protected int undo_position;
public static final int TYPE_UNDO_CHANGE = 1;
public static final int TYPE_UNDO_NEW = 2;
public static final int TYPE_UNDO_DELETE = 3;
public static final int TYPE_UNDO_POSITION = 4;
public static final String STRING_SPECIAL = "SPECIAL"; //$NON-NLS-1$
public static final String STRING_SPECIAL_START = "START"; //$NON-NLS-1$
public static final String STRING_SPECIAL_DUMMY = "DUMMY"; //$NON-NLS-1$
public static final String STRING_SPECIAL_OK = "OK"; //$NON-NLS-1$
public static final String STRING_SPECIAL_ERROR = "ERROR"; //$NON-NLS-1$
// Remember the size and position of the different windows...
public boolean max[] = new boolean[1];
public String created_user, modifiedUser;
public Date created_date, modifiedDate;
protected boolean useBatchId;
protected boolean batchIdPassed;
protected boolean logfieldUsed;
/** If this is null, we load from the default shared objects file : $KETTLE_HOME/.kettle/shared.xml */
protected String sharedObjectsFile;
public JobMeta(LogWriter l)
{
log = l;
clear();
initializeVariablesFrom(null);
}
public long getID()
{
return id;
}
public void setID(long id)
{
this.id = id;
}
public void clear()
{
name = null;
jobcopies = new ArrayList<JobEntryCopy>();
jobentries = new ArrayList<JobEntryInterface>();
jobhops = new ArrayList<JobHopMeta>();
notes = new ArrayList<NotePadMeta>();
databases = new ArrayList<DatabaseMeta>();
slaveServers = new ArrayList<SlaveServer>();
logconnection = null;
logTable = null;
arguments = null;
max_undo = Const.MAX_UNDO;
dbcache = DBCache.getInstance();
undo = new ArrayList<TransAction>();
undo_position = -1;
addDefaults();
setChanged(false);
created_user = "-"; //$NON-NLS-1$
created_date = new Date();
modifiedUser = "-"; //$NON-NLS-1$
modifiedDate = new Date();
directory = new RepositoryDirectory();
description=null;
job_status=-1;
job_version=null;
extended_description=null;
useBatchId=true;
logfieldUsed=true;
// setInternalKettleVariables(); Don't clear the internal variables for ad-hoc jobs, it ruins the previews
// etc.
}
public void addDefaults()
{
/*
addStart(); // Add starting point!
addDummy(); // Add dummy!
addOK(); // errors == 0 evaluation
addError(); // errors != 0 evaluation
*/
clearChanged();
}
public static final JobEntryCopy createStartEntry()
{
JobEntrySpecial jobEntrySpecial = new JobEntrySpecial(STRING_SPECIAL_START, true, false);
JobEntryCopy jobEntry = new JobEntryCopy();
jobEntry.setID(-1L);
jobEntry.setEntry(jobEntrySpecial);
jobEntry.setLocation(50, 50);
jobEntry.setDrawn(false);
jobEntry.setDescription(Messages.getString("JobMeta.StartJobEntry.Description")); //$NON-NLS-1$
return jobEntry;
}
public static final JobEntryCopy createDummyEntry()
{
JobEntrySpecial jobEntrySpecial = new JobEntrySpecial(STRING_SPECIAL_DUMMY, false, true);
JobEntryCopy jobEntry = new JobEntryCopy();
jobEntry.setID(-1L);
jobEntry.setEntry(jobEntrySpecial);
jobEntry.setLocation(50, 50);
jobEntry.setDrawn(false);
jobEntry.setDescription(Messages.getString("JobMeta.DummyJobEntry.Description")); //$NON-NLS-1$
return jobEntry;
}
public JobEntryCopy getStart()
{
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy cge = getJobEntry(i);
if (cge.isStart()) return cge;
}
return null;
}
public JobEntryCopy getDummy()
{
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy cge = getJobEntry(i);
if (cge.isDummy()) return cge;
}
return null;
}
/**
* Compares two jobs on name and filename
*/
public int compare(JobMeta t1, JobMeta t2)
{
if (Const.isEmpty(t1.getName()) && !Const.isEmpty(t2.getName())) return -1;
if (!Const.isEmpty(t1.getName()) && Const.isEmpty(t2.getName())) return 1;
if (Const.isEmpty(t1.getName()) && Const.isEmpty(t2.getName()))
{
if (Const.isEmpty(t1.getFilename()) && !Const.isEmpty(t2.getFilename())) return -1;
if (!Const.isEmpty(t1.getFilename()) && Const.isEmpty(t2.getFilename())) return 1;
if (Const.isEmpty(t1.getFilename()) && Const.isEmpty(t2.getFilename())) { return 0; }
return t1.getFilename().compareTo(t2.getFilename());
}
return t1.getName().compareTo(t2.getName());
}
public int compareTo(JobMeta o)
{
return compare(this, o);
}
public boolean equals(Object obj)
{
if (!(obj instanceof JobMeta))
return false;
return compare(this,(JobMeta) obj) == 0;
}
public Object clone()
{
return realClone(true);
}
public Object realClone(boolean doClear)
{
try
{
JobMeta jobMeta = (JobMeta) super.clone();
if (doClear) {
jobMeta.clear();
} else {
jobMeta.jobcopies = new ArrayList<JobEntryCopy>();
jobMeta.jobentries = new ArrayList<JobEntryInterface>();
jobMeta.jobhops = new ArrayList<JobHopMeta>();
jobMeta.notes = new ArrayList<NotePadMeta>();
jobMeta.databases = new ArrayList<DatabaseMeta>();
jobMeta.slaveServers = new ArrayList<SlaveServer>();
}
for (JobEntryInterface entry : jobentries) jobMeta.jobentries.add((JobEntryInterface)entry.clone());
for (JobEntryCopy entry : jobcopies) jobMeta.jobcopies.add((JobEntryCopy)entry.clone_deep());
for (JobHopMeta entry : jobhops) jobMeta.jobhops.add((JobHopMeta)entry.clone());
for (NotePadMeta entry : notes) jobMeta.notes.add((NotePadMeta)entry.clone());
for (DatabaseMeta entry : databases) jobMeta.databases.add((DatabaseMeta)entry.clone());
for (SlaveServer slave : slaveServers) jobMeta.getSlaveServers().add((SlaveServer)slave.clone());
return jobMeta;
}
catch (CloneNotSupportedException e)
{
return null;
}
}
public String getName()
{
return name;
}
public void setName(String name)
{
this.name = name;
setInternalKettleVariables();
}
/**
* Builds a name - if no name is set, yet - from the filename
*/
public void nameFromFilename()
{
if (!Const.isEmpty(filename))
{
name = Const.createName(filename);
}
}
/**
* @return Returns the directory.
*/
public RepositoryDirectory getDirectory()
{
return directory;
}
/**
* @param directory The directory to set.
*/
public void setDirectory(RepositoryDirectory directory)
{
this.directory = directory;
setInternalKettleVariables();
}
public String getFilename()
{
return filename;
}
public void setFilename(String filename)
{
this.filename = filename;
setInternalKettleVariables();
}
public DatabaseMeta getLogConnection()
{
return logconnection;
}
public void setLogConnection(DatabaseMeta ci)
{
logconnection = ci;
}
/**
* @return Returns the databases.
*/
public List<DatabaseMeta> getDatabases()
{
return databases;
}
/**
* @param databases The databases to set.
*/
public void setDatabases(List<DatabaseMeta> databases)
{
this.databases = databases;
}
public void setChanged(boolean ch)
{
if (ch)
setChanged();
else
clearChanged();
}
public void clearChanged()
{
changed_entries = false;
changed_hops = false;
changed_notes = false;
changed_databases = false;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entry = getJobEntry(i);
entry.setChanged(false);
}
for (JobHopMeta hi:jobhops) // Look at all the hops
{
hi.setChanged(false);
}
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta db = getDatabase(i);
db.setChanged(false);
}
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta note = getNote(i);
note.setChanged(false);
}
super.clearChanged();
}
public boolean hasChanged()
{
if (super.hasChanged()) return true;
if (haveJobEntriesChanged()) return true;
if (haveJobHopsChanged()) return true;
if (haveConnectionsChanged()) return true;
if (haveNotesChanged()) return true;
return false;
}
protected void saveRepJob(Repository rep) throws KettleException
{
try
{
// The ID has to be assigned, even when it's a new item...
rep.insertJob(getID(), directory.getID(), getName(), logconnection == null ? -1 : logconnection.getID(), logTable, modifiedUser,
modifiedDate, useBatchId, batchIdPassed, logfieldUsed, sharedObjectsFile,description,extended_description,job_version,
job_status, created_user,created_date);
}
catch (KettleDatabaseException dbe)
{
throw new KettleException(Messages.getString("JobMeta.Exception.UnableToSaveJobToRepository"), dbe); //$NON-NLS-1$
}
}
public boolean showReplaceWarning(Repository rep)
{
if (getID() < 0)
{
try
{
if (rep.getJobID(getName(), directory.getID()) > 0) return true;
}
catch (KettleException dbe)
{
return true;
}
}
return false;
}
/**
* This method asks all job entries in the job whether or not the specified database connection is used.
* The connection is used in the job if any of the job entries uses it or if it is being used to log to.
* @param databaseMeta The connection to check
* @return true if the connection is used in this job.
*/
public boolean isDatabaseConnectionUsed(DatabaseMeta databaseMeta)
{
for (int i=0;i<nrJobEntries();i++)
{
JobEntryCopy jobEntry = getJobEntry(i);
DatabaseMeta dbs[] = jobEntry.getEntry().getUsedDatabaseConnections();
for (int d=0;d<dbs.length;d++)
{
if (dbs[d]!=null && dbs[d].equals(databaseMeta)) return true;
}
}
if (logconnection!=null && logconnection.equals(databaseMeta)) return true;
return false;
}
public String getFileType() {
return LastUsedFile.FILE_TYPE_JOB;
}
public String[] getFilterNames() {
return Const.getJobFilterNames();
}
public String[] getFilterExtensions() {
return Const.STRING_JOB_FILTER_EXT;
}
public String getDefaultExtension() {
return Const.STRING_JOB_DEFAULT_EXT;
}
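/**
* Serializes this job to XML: name and description, database connections, slave servers, log settings, job entry copies, hops and notes.
*/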
public String getXML()
{
Props props = null;
if (Props.isInitialized()) props=Props.getInstance();
DatabaseMeta ci = getLogConnection();
StringBuffer retval = new StringBuffer(500);
retval.append("<").append(XML_TAG).append(">").append(Const.CR); //$NON-NLS-1$
retval.append(" ").append(XMLHandler.addTagValue("name", getName())); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("description", description)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("extended_description", extended_description));
retval.append(" ").append(XMLHandler.addTagValue("job_version", job_version));
if ( job_status >= 0 )
{
retval.append(" ").append(XMLHandler.addTagValue("job_status", job_status));
}
retval.append(" ").append(XMLHandler.addTagValue("directory", directory.getPath())); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("created_user", created_user)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(created_date))); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
retval.append(" ").append(XMLHandler.addTagValue("modified_user", modifiedUser)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(modifiedDate))); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Let's add the last known file location if we have any...
//
if (!Const.isEmpty(filename)) {
retval.append(" ").append(XMLHandler.addTagValue("filename", filename)); //$NON-NLS-1$ //$NON-NLS-2$
}
// Save the database connections...
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta dbMeta = getDatabase(i);
if (props!=null && props.areOnlyUsedConnectionsSavedToXML())
{
if (isDatabaseConnectionUsed(dbMeta))
{
retval.append(dbMeta.getXML());
}
}
else
{
retval.append(dbMeta.getXML());
}
}
// The slave servers...
//
retval.append(" ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < slaveServers.size(); i++)
{
SlaveServer slaveServer = slaveServers.get(i);
retval.append(" ").append(slaveServer.getXML()).append(Const.CR);
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR); //$NON-NLS-1$
retval.append(" ").append(XMLHandler.addTagValue("logconnection", ci == null ? "" : ci.getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
retval.append(" ").append(XMLHandler.addTagValue("logtable", logTable)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("use_batchid", useBatchId)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("pass_batchid", batchIdPassed)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("use_logfield", logfieldUsed)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile)); // $NON-NLS-1$
retval.append(" <entries>").append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jge = getJobEntry(i);
retval.append(jge.getXML());
}
retval.append(" </entries>").append(Const.CR); //$NON-NLS-1$
retval.append(" <hops>").append(Const.CR); //$NON-NLS-1$
for (JobHopMeta hi:jobhops) // Look at all the hops
{
retval.append(hi.getXML());
}
retval.append(" </hops>").append(Const.CR); //$NON-NLS-1$
retval.append(" <notepads>").append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta ni = getNote(i);
retval.append(ni.getXML());
}
retval.append(" </notepads>").append(Const.CR); //$NON-NLS-1$
retval.append("</").append(XML_TAG).append(">").append(Const.CR); //$NON-NLS-1$
return retval.toString();
}
public JobMeta(LogWriter log, String fname, Repository rep) throws KettleXMLException
{
this(log, null, fname, rep, null);
}
public JobMeta(LogWriter log, String fname, Repository rep, OverwritePrompter prompter) throws KettleXMLException
{
this(log, null, fname, rep, prompter);
}
/**
* Load the job from the XML file specified.
*
* @param log the logging channel
* @param fname The filename to load as a job
* @param rep The repository to bind against, null if there is no repository available.
* @throws KettleXMLException
*/
public JobMeta(LogWriter log, VariableSpace parentSpace, String fname, Repository rep, OverwritePrompter prompter) throws KettleXMLException
{
this.log = log;
this.initializeVariablesFrom(parentSpace);
try
{
// OK, try to load using the VFS stuff...
Document doc = XMLHandler.loadXMLFile(KettleVFS.getFileObject(fname));
if (doc != null)
{
// Clear the job
clear();
// The jobnode
Node jobnode = XMLHandler.getSubNode(doc, XML_TAG);
loadXML(jobnode, rep, prompter);
// Do this at the end
setFilename(fname);
}
else
{
throw new KettleXMLException(Messages.getString("JobMeta.Exception.ErrorReadingFromXMLFile") + fname); //$NON-NLS-1$
}
}
catch (Exception e)
{
throw new KettleXMLException(Messages.getString("JobMeta.Exception.UnableToLoadJobFromXMLFile") + fname + "]", e); //$NON-NLS-1$ //$NON-NLS-2$
}
}
public JobMeta(LogWriter log, Node jobnode, Repository rep, OverwritePrompter prompter) throws KettleXMLException
{
this.log = log;
loadXML(jobnode, rep, prompter);
}
public boolean isRepReference() {
return isRepReference(getFilename(), this.getName());
}
public boolean isFileReference() {
return !isRepReference(getFilename(), this.getName());
}
public static boolean isRepReference(String fileName, String transName) {
return Const.isEmpty(fileName) && !Const.isEmpty(transName);
}
public static boolean isFileReference(String fileName, String transName) {
return !isRepReference(fileName, transName);
}
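/**
* Loads this job definition from the given XML node: job information, shared objects, database connections, slave servers, log settings, job entry copies, hops and notes.
*/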
public void loadXML(Node jobnode, Repository rep, OverwritePrompter prompter ) throws KettleXMLException
{
Props props = null;
if (Props.isInitialized()) props = Props.getInstance();
try
{
// clear the jobs;
clear();
//
// get job info:
//
name = XMLHandler.getTagValue(jobnode, "name"); //$NON-NLS-1$
// description
description = XMLHandler.getTagValue(jobnode, "description");
// extended description
extended_description = XMLHandler.getTagValue(jobnode, "extended_description");
// job version
job_version = XMLHandler.getTagValue(jobnode, "job_version");
// job status
job_status = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"),-1);
// Created user/date
created_user = XMLHandler.getTagValue(jobnode, "created_user"); //$NON-NLS-1$
String createDate = XMLHandler.getTagValue(jobnode, "created_date"); //$NON-NLS-1$
if (createDate != null)
{
created_date = XMLHandler.stringToDate(createDate);
}
// Changed user/date
modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user"); //$NON-NLS-1$
String modDate = XMLHandler.getTagValue(jobnode, "modified_date"); //$NON-NLS-1$
if (modDate != null)
{
modifiedDate = XMLHandler.stringToDate(modDate);
}
// Also load and set the filename
//
filename = XMLHandler.getTagValue(jobnode, "filename"); //$NON-NLS-1$
// Load the default list of databases
// Read objects from the shared XML file & the repository
try
{
sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file"); //$NON-NLS-1$ //$NON-NLS-2$
readSharedObjects(rep);
}
catch(Exception e)
{
LogWriter.getInstance().logError(toString(), Messages.getString("JobMeta.ErrorReadingSharedObjects.Message", e.toString())); // $NON-NLS-1$ //$NON-NLS-1$
LogWriter.getInstance().logError(toString(), Const.getStackTracker(e));
}
//
// Read the database connections
//
int nr = XMLHandler.countNodes(jobnode, "connection"); //$NON-NLS-1$
for (int i = 0; i < nr; i++)
{
Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i); //$NON-NLS-1$
DatabaseMeta dbcon = new DatabaseMeta(dbnode);
DatabaseMeta exist = findDatabase(dbcon.getName());
if (exist == null)
{
addDatabase(dbcon);
}
else
{
boolean askOverwrite = Props.isInitialized() ? props.askAboutReplacingDatabaseConnections() : false;
boolean overwrite = Props.isInitialized() ? props.replaceExistingDatabaseConnections() : true;
if (askOverwrite && prompter != null)
{
overwrite = prompter.overwritePrompt(
Messages.getString("JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName() ),
Messages.getString("JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"),
Props.STRING_ASK_ABOUT_REPLACING_DATABASES);
}
if (overwrite)
{
int idx = indexOfDatabase(exist);
removeDatabase(idx);
addDatabase(idx, dbcon);
}
}
}
// Read the slave servers...
//
Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS); //$NON-NLS-1$
int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG); //$NON-NLS-1$
for (int i = 0 ; i < nrSlaveServers ; i++)
{
Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
SlaveServer slaveServer = new SlaveServer(slaveServerNode);
// Check if the object exists and if it's a shared object.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
SlaveServer check = findSlaveServer(slaveServer.getName());
if (check!=null)
{
if (!check.isShared()) // we don't overwrite shared objects.
{
addOrReplaceSlaveServer(slaveServer);
}
}
else
{
slaveServers.add(slaveServer);
}
}
/*
* Get the log database connection & log table
*/
String logcon = XMLHandler.getTagValue(jobnode, "logconnection"); //$NON-NLS-1$
logconnection = findDatabase(logcon);
logTable = XMLHandler.getTagValue(jobnode, "logtable"); //$NON-NLS-1$
useBatchId = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")); //$NON-NLS-1$ //$NON-NLS-2$
batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid")); //$NON-NLS-1$ //$NON-NLS-2$
logfieldUsed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")); //$NON-NLS-1$ //$NON-NLS-2$
/*
* read the job entries...
*/
Node entriesnode = XMLHandler.getSubNode(jobnode, "entries"); //$NON-NLS-1$
int tr = XMLHandler.countNodes(entriesnode, "entry"); //$NON-NLS-1$
for (int i = 0; i < tr; i++)
{
Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i); //$NON-NLS-1$
// System.out.println("Reading entry:\n"+entrynode);
JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep);
JobEntryCopy prev = findJobEntry(je.getName(), 0, true);
if (prev != null)
{
if (je.getNr() == 0) // See if the #0 already exists!
{
// Replace previous version with this one: remove it first
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
}
else
if (je.getNr() > 0) // Use previously defined JobEntry info!
{
je.setEntry(prev.getEntry());
// See if entry already exists...
prev = findJobEntry(je.getName(), je.getNr(), true);
if (prev != null) // remove the old one!
{
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
}
}
}
// Add the JobEntryCopy...
addJobEntry(je);
}
Node hopsnode = XMLHandler.getSubNode(jobnode, "hops"); //$NON-NLS-1$
int ho = XMLHandler.countNodes(hopsnode, "hop"); //$NON-NLS-1$
for (int i = 0; i < ho; i++)
{
Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i); //$NON-NLS-1$
JobHopMeta hi = new JobHopMeta(hopnode, this);
jobhops.add(hi);
}
// Read the notes...
Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads"); //$NON-NLS-1$
int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad"); //$NON-NLS-1$
for (int i = 0; i < nrnotes; i++)
{
Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i); //$NON-NLS-1$
NotePadMeta ni = new NotePadMeta(notepadnode);
notes.add(ni);
}
clearChanged();
}
catch (Exception e)
{
throw new KettleXMLException(Messages.getString("JobMeta.Exception.UnableToLoadJobFromXMLNode"), e); //$NON-NLS-1$
}
finally
{
setInternalKettleVariables();
}
}
/**
* Read the database connections in the repository and add them to this job if they are not yet present.
*
* @param rep The repository to load the database connections from.
* @throws KettleException
*/
public void readDatabases(Repository rep) throws KettleException
{
readDatabases(rep, true);
}
/*
* (non-Javadoc)
*
* @see org.pentaho.di.trans.HasDatabaseInterface#readDatabases(org.pentaho.di.repository.Repository, boolean)
*/
public void readDatabases(Repository rep, boolean overWriteShared) throws KettleException
{
try
{
long dbids[] = rep.getDatabaseIDs();
for (int i = 0; i < dbids.length; i++)
{
DatabaseMeta databaseMeta = new DatabaseMeta(rep, dbids[i]);
DatabaseMeta check = findDatabase(databaseMeta.getName()); // Check if there already is one in the job
if (check == null || overWriteShared) // Add the connection when it's missing, or replace it when overwriting shared objects is requested.
{
if (databaseMeta.getName() != null)
{
addOrReplaceDatabase(databaseMeta);
if (!overWriteShared) databaseMeta.setChanged(false);
}
}
}
setChanged(false);
}
catch (KettleDatabaseException dbe)
{
throw new KettleException(Messages.getString("JobMeta.Log.UnableToReadDatabaseIDSFromRepository"), dbe); //$NON-NLS-1$
}
catch (KettleException ke)
{
throw new KettleException(Messages.getString("JobMeta.Log.UnableToReadDatabasesFromRepository"), ke); //$NON-NLS-1$
}
}
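/**
* Reads the shared objects file (databases and slave servers) and merges its contents into this job, then loads the database connections from the repository when one is available.
*/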
public void readSharedObjects(Repository rep) throws KettleException
{
// Extract the shared steps, connections, etc. using the SharedObjects class
//
String soFile = environmentSubstitute(sharedObjectsFile);
SharedObjects sharedObjects = new SharedObjects(soFile);
Map<?, SharedObjectInterface> objectsMap = sharedObjects.getObjectsMap();
// First read the databases...
// We read databases & slaves first because there might be dependencies that need to be resolved.
//
for (SharedObjectInterface object : objectsMap.values())
{
if (object instanceof DatabaseMeta)
{
DatabaseMeta databaseMeta = (DatabaseMeta) object;
addOrReplaceDatabase(databaseMeta);
}
else if (object instanceof SlaveServer)
{
SlaveServer slaveServer = (SlaveServer) object;
addOrReplaceSlaveServer(slaveServer);
}
}
if (rep!=null)
{
readDatabases(rep, true);
}
}
public boolean saveSharedObjects()
{
try
{
// First load all the shared objects...
String soFile = environmentSubstitute(sharedObjectsFile);
SharedObjects sharedObjects = new SharedObjects(soFile);
// Now overwrite the objects in there
List<Object> shared = new ArrayList<Object>();
shared.addAll(databases);
shared.addAll(slaveServers);
// The databases connections...
for (int i=0;i<shared.size();i++)
{
SharedObjectInterface sharedObject = (SharedObjectInterface) shared.get(i);
if (sharedObject.isShared())
{
sharedObjects.storeObject(sharedObject);
}
}
// Save the objects
sharedObjects.saveToFile();
return true;
}
catch(Exception e)
{
log.logError(toString(), "Unable to save shared ojects: "+e.toString());
return false;
}
}
/**
* Find a database connection by it's name
*
* @param name The database name to look for
* @return The database connection or null if nothing was found.
*/
public DatabaseMeta findDatabase(String name)
{
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta ci = getDatabase(i);
if (ci.getName().equalsIgnoreCase(name)) { return ci; }
}
return null;
}
public void saveRep(Repository rep) throws KettleException
{
saveRep(rep, null);
}
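/**
* Saves this job to the repository: job details, slave servers, notes, job entry copies and hops, committing on success and rolling back on failure.
*/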
public void saveRep(Repository rep, IProgressMonitor monitor) throws KettleException
{
try
{
int nrWorks = 2 + nrDatabases() + nrNotes() + nrJobEntries() + nrJobHops();
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.SavingTransformation") + directory + Const.FILE_SEPARATOR + getName(), nrWorks); //$NON-NLS-1$
rep.lockRepository();
rep.insertLogEntry("save job '"+getName()+"'"); //$NON-NLS-1$ //$NON-NLS-2$
// Before we start, make sure we have a valid job ID!
// Two possibilities:
// 1) We have an ID: keep it
// 2) We don't have an ID: look it up.
// If we find a job with the same name: ask!
//
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.HandlingPreviousVersionOfJob")); //$NON-NLS-1$
setID(rep.getJobID(getName(), directory.getID()));
// If no valid id is available in the database, assign one...
if (getID() <= 0)
{
setID(rep.getNextJobID());
}
else
{
// If we have a valid ID, we need to make sure everything is cleared out
// of the database for this id_job, before we put it back in...
rep.delAllFromJob(getID());
}
if (monitor != null) monitor.worked(1);
// Now, save the job entry in R_JOB
// Note, we save this first so that we have an ID in the database.
// Everything else depends on this ID, including recursive job entries that point back to the same job. (retry)
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingJobDetails")); //$NON-NLS-1$
log.logDetailed(toString(), "Saving job info to repository..."); //$NON-NLS-1$
saveRepJob(rep);
if (monitor != null) monitor.worked(1);
// Save the slaves
//
for (int i=0;i<slaveServers.size();i++)
{
SlaveServer slaveServer = slaveServers.get(i);
slaveServer.saveRep(rep, getID(), false);
}
//
// Save the notes
//
log.logDetailed(toString(), "Saving notes to repository..."); //$NON-NLS-1$
for (int i = 0; i < nrNotes(); i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingNoteNr") + (i + 1) + "/" + nrNotes()); //$NON-NLS-1$ //$NON-NLS-2$
NotePadMeta ni = getNote(i);
ni.saveRep(rep, getID());
if (ni.getID() > 0)
{
rep.insertJobNote(getID(), ni.getID());
}
if (monitor != null) monitor.worked(1);
}
//
// Save the job entries
//
log.logDetailed(toString(), "Saving " + nrJobEntries() + " Job enty copies to repository..."); //$NON-NLS-1$ //$NON-NLS-2$
rep.updateJobEntryTypes();
for (int i = 0; i < nrJobEntries(); i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingJobEntryNr") + (i + 1) + "/" + nrJobEntries()); //$NON-NLS-1$ //$NON-NLS-2$
JobEntryCopy cge = getJobEntry(i);
cge.saveRep(rep, getID());
if (monitor != null) monitor.worked(1);
}
log.logDetailed(toString(), "Saving job hops to repository..."); //$NON-NLS-1$
for (int i = 0; i < nrJobHops(); i++)
{
if (monitor != null) monitor.subTask("Saving job hop #" + (i + 1) + "/" + nrJobHops()); //$NON-NLS-1$ //$NON-NLS-2$
JobHopMeta hi = getJobHop(i);
hi.saveRep(rep, getID());
if (monitor != null) monitor.worked(1);
}
// Commit this transaction!!
rep.commit();
clearChanged();
if (monitor != null) monitor.done();
}
catch (KettleDatabaseException dbe)
{
rep.rollback();
throw new KettleException(Messages.getString("JobMeta.Exception.UnableToSaveJobInRepositoryRollbackPerformed"), dbe); //$NON-NLS-1$
}
finally
{
// don't forget to unlock the repository.
// Normally this is done by the commit / rollback statement, but hey there are some freaky databases out
// there...
rep.unlockRepository();
}
}
/**
* Load a job in a directory
*
* @param log the logging channel
* @param rep The Repository
* @param jobname The name of the job
* @param repdir The directory in which the job resides.
* @throws KettleException
*/
public JobMeta(LogWriter log, Repository rep, String jobname, RepositoryDirectory repdir) throws KettleException
{
this(log, rep, jobname, repdir, null);
}
/**
* Load a job in a directory
*
* @param log the logging channel
* @param rep The Repository
* @param jobname The name of the job
* @param repdir The directory in which the job resides.
* @param monitor the progress monitor, may be null
* @throws KettleException
*/
public JobMeta(LogWriter log, Repository rep, String jobname, RepositoryDirectory repdir, IProgressMonitor monitor) throws KettleException
{
this.log = log;
try
{
// Clear everything...
clear();
directory = repdir;
// Get the job id
setID(rep.getJobID(jobname, repdir.getID()));
// If no valid id is available in the database, then give error...
if (getID() > 0)
{
// Load the notes...
long noteids[] = rep.getJobNoteIDs(getID());
long jecids[] = rep.getJobEntryCopyIDs(getID());
long hopid[] = rep.getJobHopIDs(getID());
int nrWork = 2 + noteids.length + jecids.length + hopid.length;
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork); //$NON-NLS-1$
//
// get job info:
//
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobInformation")); //$NON-NLS-1$
RowMetaAndData jobRow = rep.getJob(getID());
name = jobRow.getString("NAME", null); //$NON-NLS-1$
description = jobRow.getString("DESCRIPTION", null); //$NON-NLS-1$
extended_description = jobRow.getString("EXTENDED_DESCRIPTION", null); //$NON-NLS-1$
job_version = jobRow.getString("JOB_VERSION", null); //$NON-NLS-1$
job_status = Const.toInt(jobRow.getString("JOB_STATUS", null),-1); //$NON-NLS-1$
logTable = jobRow.getString("TABLE_NAME_LOG", null); //$NON-NLS-1$
created_user = jobRow.getString("CREATED_USER", null); //$NON-NLS-1$
created_date = jobRow.getDate("CREATED_DATE", new Date()); //$NON-NLS-1$
modifiedUser = jobRow.getString("MODIFIED_USER", null); //$NON-NLS-1$
modifiedDate = jobRow.getDate("MODIFIED_DATE", new Date()); //$NON-NLS-1$
long id_logdb = jobRow.getInteger("ID_DATABASE_LOG", 0); //$NON-NLS-1$
if (id_logdb > 0)
{
// Get the logconnection
logconnection = new DatabaseMeta(rep, id_logdb);
}
useBatchId = jobRow.getBoolean("USE_BATCH_ID", false); //$NON-NLS-1$
batchIdPassed = jobRow.getBoolean("PASS_BATCH_ID", false); //$NON-NLS-1$
logfieldUsed = jobRow.getBoolean("USE_LOGFIELD", false); //$NON-NLS-1$
if (monitor != null) monitor.worked(1);
//
// Load the common database connections
//
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingAvailableDatabasesFromRepository")); //$NON-NLS-1$
// Read objects from the shared XML file & the repository
try
{
sharedObjectsFile = jobRow.getString("SHARED_FILE", null);
readSharedObjects(rep);
}
catch(Exception e)
{
LogWriter.getInstance().logError(toString(), Messages.getString("JobMeta.ErrorReadingSharedObjects.Message", e.toString())); // $NON-NLS-1$ //$NON-NLS-1$
LogWriter.getInstance().logError(toString(), Const.getStackTracker(e));
}
if (monitor != null) monitor.worked(1);
log.logDetailed(toString(), "Loading " + noteids.length + " notes"); //$NON-NLS-1$ //$NON-NLS-2$
for (int i = 0; i < noteids.length; i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length); //$NON-NLS-1$ //$NON-NLS-2$
NotePadMeta ni = new NotePadMeta(log, rep, noteids[i]);
if (indexOfNote(ni) < 0) addNote(ni);
if (monitor != null) monitor.worked(1);
}
// Load the job entries...
log.logDetailed(toString(), "Loading " + jecids.length + " job entries"); //$NON-NLS-1$ //$NON-NLS-2$
for (int i = 0; i < jecids.length; i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length)); //$NON-NLS-1$ //$NON-NLS-2$
JobEntryCopy jec = new JobEntryCopy(log, rep, getID(), jecids[i], jobentries, databases, slaveServers);
// Also set the copy number...
// We count the number of job entry copies that use the job entry
//
int copyNr = 0;
for (JobEntryCopy copy : jobcopies) {
if (jec.getEntry()==copy.getEntry()) {
copyNr++;
}
}
jec.setNr(copyNr);
int idx = indexOfJobEntry(jec);
if (idx < 0)
{
if (jec.getName() != null && jec.getName().length() > 0) addJobEntry(jec);
}
else
{
setJobEntry(idx, jec); // replace it!
}
if (monitor != null) monitor.worked(1);
}
// Load the hops...
log.logDetailed(toString(), "Loading " + hopid.length + " job hops"); //$NON-NLS-1$ //$NON-NLS-2$
for (int i = 0; i < hopid.length; i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (jecids.length)); //$NON-NLS-1$ //$NON-NLS-2$
JobHopMeta hi = new JobHopMeta(rep, hopid[i], this, jobcopies);
jobhops.add(hi);
if (monitor != null) monitor.worked(1);
}
// Finally, clear the changed flags...
clearChanged();
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.FinishedLoadOfJob")); //$NON-NLS-1$
if (monitor != null) monitor.done();
}
else
{
throw new KettleException(Messages.getString("JobMeta.Exception.CanNotFindJob") + jobname); //$NON-NLS-1$
}
}
catch (KettleException dbe)
{
throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
}
finally
{
setInternalKettleVariables();
}
}
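/**
* Returns the job entry copy whose icon covers the given canvas coordinates, or null if none does.
*/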
public JobEntryCopy getJobEntryCopy(int x, int y, int iconsize)
{
int i, s;
s = nrJobEntries();
for (i = s - 1; i >= 0; i--) // Back to front because drawing goes from start to end
{
JobEntryCopy je = getJobEntry(i);
Point p = je.getLocation();
if (p != null)
{
if (x >= p.x && x <= p.x + iconsize && y >= p.y && y <= p.y + iconsize) { return je; }
}
}
return null;
}
public int nrJobEntries()
{
return jobcopies.size();
}
public int nrJobHops()
{
return jobhops.size();
}
public int nrNotes()
{
return notes.size();
}
public int nrDatabases()
{
return databases.size();
}
public JobHopMeta getJobHop(int i)
{
return jobhops.get(i);
}
public JobEntryCopy getJobEntry(int i)
{
return jobcopies.get(i);
}
public NotePadMeta getNote(int i)
{
return notes.get(i);
}
public DatabaseMeta getDatabase(int i)
{
return databases.get(i);
}
public void addJobEntry(JobEntryCopy je)
{
jobcopies.add(je);
setChanged();
}
public void addJobHop(JobHopMeta hi)
{
jobhops.add(hi);
setChanged();
}
public void addNote(NotePadMeta ni)
{
notes.add(ni);
setChanged();
}
public void addDatabase(DatabaseMeta ci)
{
databases.add(ci);
changed_databases = true;
}
public void addJobEntry(int p, JobEntryCopy si)
{
jobcopies.add(p, si);
changed_entries = true;
}
public void addJobHop(int p, JobHopMeta hi)
{
jobhops.add(p, hi);
changed_hops = true;
}
public void addNote(int p, NotePadMeta ni)
{
notes.add(p, ni);
changed_notes = true;
}
public void addDatabase(int p, DatabaseMeta ci)
{
databases.add(p, ci);
changed_databases = true;
}
/*
* (non-Javadoc)
*
* @see org.pentaho.di.trans.HasDatabaseInterface#addOrReplaceDatabase(org.pentaho.di.core.database.DatabaseMeta)
*/
public void addOrReplaceDatabase(DatabaseMeta databaseMeta)
{
int index = databases.indexOf(databaseMeta);
if (index < 0)
{
databases.add(databaseMeta);
}
else
{
DatabaseMeta previous = getDatabase(index);
previous.replaceMeta(databaseMeta);
}
changed_databases = true;
}
/**
* Add a new slave server to the transformation if that didn't exist yet.
* Otherwise, replace it.
*
* @param slaveServer The slave server to be added.
*/
public void addOrReplaceSlaveServer(SlaveServer slaveServer)
{
int index = slaveServers.indexOf(slaveServer);
if (index<0)
{
slaveServers.add(slaveServer);
}
else
{
SlaveServer previous = slaveServers.get(index);
previous.replaceMeta(slaveServer);
}
setChanged();
}
public void removeJobEntry(int i)
{
jobcopies.remove(i);
setChanged();
}
public void removeJobHop(int i)
{
jobhops.remove(i);
setChanged();
}
public void removeNote(int i)
{
notes.remove(i);
setChanged();
}
public void raiseNote(int p)
{
// if valid index and not last index
if ((p >=0) && (p < notes.size()-1))
{
NotePadMeta note = notes.remove(p);
notes.add(note);
changed_notes = true;
}
}
public void lowerNote(int p)
{
// if valid index and not first index
if ((p >0) && (p < notes.size()))
{
NotePadMeta note = notes.remove(p);
notes.add(0, note);
changed_notes = true;
}
}
public void removeDatabase(int i)
{
if (i < 0 || i >= databases.size()) return;
databases.remove(i);
changed_databases = true;
}
public int indexOfJobHop(JobHopMeta he)
{
return jobhops.indexOf(he);
}
public int indexOfNote(NotePadMeta ni)
{
return notes.indexOf(ni);
}
public int indexOfJobEntry(JobEntryCopy ge)
{
return jobcopies.indexOf(ge);
}
public int indexOfDatabase(DatabaseMeta di)
{
return databases.indexOf(di);
}
public void setJobEntry(int idx, JobEntryCopy jec)
{
jobcopies.set(idx, jec);
}
/**
* Find an existing JobEntryCopy by it's name and number
*
* @param name The name of the job entry copy
* @param nr The number of the job entry copy
* @return The JobEntryCopy or null if nothing was found!
*/
public JobEntryCopy findJobEntry(String name, int nr, boolean searchHiddenToo)
{
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jec = getJobEntry(i);
if (jec.getName().equalsIgnoreCase(name) && jec.getNr() == nr)
{
if (searchHiddenToo || jec.isDrawn()) { return jec; }
}
}
return null;
}
public JobEntryCopy findJobEntry(String full_name_nr)
{
int i;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jec = getJobEntry(i);
JobEntryInterface je = jec.getEntry();
if (je.toString().equalsIgnoreCase(full_name_nr)) { return jec; }
}
return null;
}
public JobHopMeta findJobHop(String name)
{
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.toString().equalsIgnoreCase(name)) { return hi; }
}
return null;
}
public JobHopMeta findJobHopFrom(JobEntryCopy jge)
{
if (jge != null) {
for (JobHopMeta hi:jobhops)
{
if (hi!=null && (hi.from_entry != null) && hi.from_entry.equals(jge)) // return the first
{
return hi;
}
}
}
return null;
}
public JobHopMeta findJobHop(JobEntryCopy from, JobEntryCopy to)
{
for (JobHopMeta hi:jobhops)
{
if (hi != null && hi.isEnabled())
{
if (hi.from_entry != null && hi.to_entry != null && hi.from_entry.equals(from) && hi.to_entry.equals(to)) { return hi; }
}
}
return null;
}
public JobHopMeta findJobHopTo(JobEntryCopy jge)
{
for (JobHopMeta hi:jobhops)
{
if (hi != null && hi.to_entry != null && hi.to_entry.equals(jge)) // Return the first!
{ return hi; }
}
return null;
}
public int findNrPrevJobEntries(JobEntryCopy from)
{
return findNrPrevJobEntries(from, false);
}
public JobEntryCopy findPrevJobEntry(JobEntryCopy to, int nr)
{
return findPrevJobEntry(to, nr, false);
}
public int findNrPrevJobEntries(JobEntryCopy to, boolean info)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && hi.to_entry.equals(to))
{
count++;
}
}
return count;
}
public JobEntryCopy findPrevJobEntry(JobEntryCopy to, int nr, boolean info)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && hi.to_entry.equals(to))
{
if (count == nr) { return hi.from_entry; }
count++;
}
}
return null;
}
public int findNrNextJobEntries(JobEntryCopy from)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && (hi.from_entry != null) && hi.from_entry.equals(from)) count++;
}
return count;
}
public JobEntryCopy findNextJobEntry(JobEntryCopy from, int cnt)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && (hi.from_entry != null) && hi.from_entry.equals(from))
{
if (count == cnt) { return hi.to_entry; }
count++;
}
}
return null;
}
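/**
* Loop detection for job graphs is not implemented here; both hasLoop variants simply return false.
*/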
public boolean hasLoop(JobEntryCopy entry)
{
return hasLoop(entry, null);
}
public boolean hasLoop(JobEntryCopy entry, JobEntryCopy lookup)
{
return false;
}
public boolean isEntryUsedInHops(JobEntryCopy jge)
{
JobHopMeta fr = findJobHopFrom(jge);
JobHopMeta to = findJobHopTo(jge);
if (fr != null || to != null) return true;
return false;
}
public int countEntries(String name)
{
int count = 0;
int i;
for (i = 0; i < nrJobEntries(); i++) // Look at all the job entries;
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name)) count++;
}
return count;
}
public int generateJobEntryNameNr(String basename)
{
int nr = 1;
JobEntryCopy e = findJobEntry(basename + " " + nr, 0, true); //$NON-NLS-1$
while (e != null)
{
nr++;
e = findJobEntry(basename + " " + nr, 0, true); //$NON-NLS-1$
}
return nr;
}
public int findUnusedNr(String name)
{
int nr = 1;
JobEntryCopy je = findJobEntry(name, nr, true);
while (je != null)
{
nr++;
// log.logDebug("findUnusedNr()", "Trying unused nr: "+nr);
je = findJobEntry(name, nr, true);
}
return nr;
}
public int findMaxNr(String name)
{
int max = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name))
{
if (je.getNr() > max) max = je.getNr();
}
}
return max;
}
/**
* Proposes an alternative job entry name when the original already exists.
*
* @param entryname The job entry name to find an alternative for
* @return The alternative job entry name.
*/
public String getAlternativeJobentryName(String entryname)
{
String newname = entryname;
JobEntryCopy jec = findJobEntry(newname);
int nr = 1;
while (jec != null)
{
nr++;
newname = entryname + " " + nr; //$NON-NLS-1$
jec = findJobEntry(newname);
}
return newname;
}
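/*
 * Usage sketch: obtain a clash-free name before adding a new entry copy.
 * The base name is illustrative; "jobMeta" stands for a loaded JobMeta instance.
 *
 *   String proposed = jobMeta.getAlternativeJobentryName("Transformation");
 *   // returns "Transformation" when unused, otherwise "Transformation 2", "Transformation 3", ...
 */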
public JobEntryCopy[] getAllJobGraphEntries(String name)
{
int count = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name)) count++;
}
JobEntryCopy retval[] = new JobEntryCopy[count];
count = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name))
{
retval[count] = je;
count++;
}
}
return retval;
}
public JobHopMeta[] getAllJobHopsUsing(String name)
{
List<JobHopMeta> hops = new ArrayList<JobHopMeta>();
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.from_entry != null && hi.to_entry != null)
{
if (hi.from_entry.getName().equalsIgnoreCase(name) || hi.to_entry.getName().equalsIgnoreCase(name))
{
hops.add(hi);
}
}
}
return hops.toArray(new JobHopMeta[hops.size()]);
}
public NotePadMeta getNote(int x, int y)
{
int i, s;
s = notes.size();
for (i = s - 1; i >= 0; i--) // Back to front because drawing goes from start to end
{
NotePadMeta ni = notes.get(i);
Point loc = ni.getLocation();
Point p = new Point(loc.x, loc.y);
if (x >= p.x && x <= p.x + ni.width + 2 * Const.NOTE_MARGIN && y >= p.y && y <= p.y + ni.height + 2 * Const.NOTE_MARGIN) { return ni; }
}
return null;
}
public void selectAll()
{
int i;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy ce = getJobEntry(i);
ce.setSelected(true);
}
setChanged();
notifyObservers("refreshGraph");
}
public void unselectAll()
{
int i;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy ce = getJobEntry(i);
ce.setSelected(false);
}
}
public int getMaxUndo()
{
return max_undo;
}
public void setMaxUndo(int mu)
{
max_undo = mu;
while (undo.size() > mu && undo.size() > 0)
undo.remove(0);
}
public int getUndoSize()
{
if (undo == null) return 0;
return undo.size();
}
public void clearUndo()
{
undo = new ArrayList<TransAction>();
undo_position = -1;
}
public void addUndo(Object from[], Object to[], int pos[], Point prev[], Point curr[], int type_of_change, boolean nextAlso)
{
// First clean up after the current position.
// Example: position at 3, size=5
// 012345
// ^
// remove 34
// Add 4
// 01234
while (undo.size() > undo_position + 1 && undo.size() > 0)
{
int last = undo.size() - 1;
undo.remove(last);
}
TransAction ta = new TransAction();
switch (type_of_change)
{
case TYPE_UNDO_CHANGE:
ta.setChanged(from, to, pos);
break;
case TYPE_UNDO_DELETE:
ta.setDelete(from, pos);
break;
case TYPE_UNDO_NEW:
ta.setNew(from, pos);
break;
case TYPE_UNDO_POSITION:
ta.setPosition(from, pos, prev, curr);
break;
}
undo.add(ta);
undo_position++;
if (undo.size() > max_undo)
{
undo.remove(0);
undo_position--;
}
}
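/*
 * Usage sketch of the undo list defined above: addUndo() records a change at the current
 * position, previousUndo() walks back (undo), nextUndo() replays forward (redo), while
 * viewThisUndo()/viewNextUndo() only peek. The "before"/"after" objects and index are illustrative.
 *
 *   jobMeta.addUndo(new Object[] { before }, new Object[] { after }, new int[] { index },
 *                   null, null, JobMeta.TYPE_UNDO_CHANGE, false);
 *   TransAction undoAction = jobMeta.previousUndo(); // null when there is nothing left to undo
 *   TransAction redoAction = jobMeta.nextUndo();     // null when there is nothing left to redo
 */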
// get previous undo, change position
public TransAction previousUndo()
{
if (undo.isEmpty() || undo_position < 0) return null; // No undo left!
TransAction retval = undo.get(undo_position);
undo_position--;
return retval;
}
/**
* View current undo, don't change undo position
*
* @return The current undo transaction
*/
public TransAction viewThisUndo()
{
if (undo.isEmpty() || undo_position < 0) return null; // No undo left!
TransAction retval = undo.get(undo_position);
return retval;
}
// View previous undo, don't change position
public TransAction viewPreviousUndo()
{
if (undo.isEmpty() || undo_position < 0) return null; // No undo left!
TransAction retval = undo.get(undo_position);
return retval;
}
public TransAction nextUndo()
{
int size = undo.size();
if (size == 0 || undo_position >= size - 1) return null; // no redo left...
undo_position++;
TransAction retval = undo.get(undo_position);
return retval;
}
public TransAction viewNextUndo()
{
int size = undo.size();
if (size == 0 || undo_position >= size - 1) return null; // no redo left...
TransAction retval = undo.get(undo_position + 1);
return retval;
}
public Point getMaximum()
{
int maxx = 0, maxy = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entry = getJobEntry(i);
Point loc = entry.getLocation();
if (loc.x > maxx) maxx = loc.x;
if (loc.y > maxy) maxy = loc.y;
}
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta ni = getNote(i);
Point loc = ni.getLocation();
if (loc.x + ni.width > maxx) maxx = loc.x + ni.width;
if (loc.y + ni.height > maxy) maxy = loc.y + ni.height;
}
return new Point(maxx + 100, maxy + 100);
}
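/*
 * Usage sketch: getMaximum() already adds a 100 pixel margin, so the returned point can be
 * used directly as a canvas size. The canvas call is hypothetical.
 *
 *   Point bounds = jobMeta.getMaximum();
 *   // canvas.setMinimumSize(bounds.x, bounds.y);
 */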
public Point[] getSelectedLocations()
{
int sels = nrSelected();
Point retval[] = new Point[sels];
for (int i = 0; i < sels; i++)
{
JobEntryCopy si = getSelected(i);
Point p = si.getLocation();
retval[i] = new Point(p.x, p.y); // explicit copy of location
}
return retval;
}
public JobEntryCopy[] getSelectedEntries()
{
int sels = nrSelected();
if (sels == 0) return null;
JobEntryCopy retval[] = new JobEntryCopy[sels];
for (int i = 0; i < sels; i++)
{
JobEntryCopy je = getSelected(i);
retval[i] = je;
}
return retval;
}
public int nrSelected()
{
int i, count;
count = 0;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.isSelected() && je.isDrawn()) count++;
}
return count;
}
public JobEntryCopy getSelected(int nr)
{
int i, count;
count = 0;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.isSelected())
{
if (nr == count) return je;
count++;
}
}
return null;
}
public int[] getEntryIndexes(JobEntryCopy entries[])
{
int retval[] = new int[entries.length];
for (int i = 0; i < entries.length; i++)
retval[i] = indexOfJobEntry(entries[i]);
return retval;
}
public JobEntryCopy findStart()
{
for (int i = 0; i < nrJobEntries(); i++)
{
if (getJobEntry(i).isStart()) return getJobEntry(i);
}
return null;
}
public String toString()
{
if (name != null) return name;
if (filename != null)
return filename;
else
return getClass().getName();
}
/**
* @return Returns the logfieldUsed.
*/
public boolean isLogfieldUsed()
{
return logfieldUsed;
}
/**
* @param logfieldUsed The logfieldUsed to set.
*/
public void setLogfieldUsed(boolean logfieldUsed)
{
this.logfieldUsed = logfieldUsed;
}
/**
* @return Returns the useBatchId.
*/
public boolean isBatchIdUsed()
{
return useBatchId;
}
/**
* @param useBatchId The useBatchId to set.
*/
public void setUseBatchId(boolean useBatchId)
{
this.useBatchId = useBatchId;
}
/**
* @return Returns the batchIdPassed.
*/
public boolean isBatchIdPassed()
{
return batchIdPassed;
}
/**
* @param batchIdPassed The batchIdPassed to set.
*/
public void setBatchIdPassed(boolean batchIdPassed)
{
this.batchIdPassed = batchIdPassed;
}
/**
* Builds a list of all the SQL statements that this job needs in order to work properly.
*
* @return A List of SQLStatement objects.
*/
public List<SQLStatement> getSQLStatements(Repository repository, IProgressMonitor monitor) throws KettleException
{
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.GettingSQLNeededForThisJob"), nrJobEntries() + 1); //$NON-NLS-1$
List<SQLStatement> stats = new ArrayList<SQLStatement>();
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy copy = getJobEntry(i);
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLForJobEntryCopy") + copy + "]"); //$NON-NLS-1$ //$NON-NLS-2$
List<SQLStatement> list = copy.getEntry().getSQLStatements(repository, this);
stats.addAll(list);
if (monitor != null) monitor.worked(1);
}
// Also check the sql for the logtable...
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLStatementsForJobLogTables")); //$NON-NLS-1$
if (logconnection != null && logTable != null && logTable.length() > 0)
{
Database db = new Database(logconnection);
try
{
db.connect();
RowMetaInterface fields = Database.getJobLogrecordFields(false, useBatchId, logfieldUsed);
String sql = db.getDDL(logTable, fields);
if (sql != null && sql.length() > 0)
{
SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logconnection, sql); //$NON-NLS-1$
stats.add(stat);
}
}
catch (KettleDatabaseException dbe)
{
SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logconnection, null); //$NON-NLS-1$
stat.setError(Messages.getString("JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo") + dbe.getMessage()); //$NON-NLS-1$
stats.add(stat);
}
finally
{
db.disconnect();
}
}
if (monitor != null) monitor.worked(1);
if (monitor != null) monitor.done();
return stats;
}
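/*
 * Usage sketch: collect the DDL a job needs before its first run. The repository may be null
 * when no repository is used; the SQLStatement accessors (hasError(), getError(), getSQL())
 * are assumed from its usual contract.
 *
 *   List<SQLStatement> statements = jobMeta.getSQLStatements(rep, null);
 *   for (SQLStatement statement : statements)
 *   {
 *       if (statement.hasError())
 *       {
 *           // report statement.getError()
 *       }
 *       else
 *       {
 *           // run or display statement.getSQL()
 *       }
 *   }
 */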
/**
* @return Returns the logTable.
*/
public String getLogTable()
{
return logTable;
}
/**
* @param logTable The logTable to set.
*/
public void setLogTable(String logTable)
{
this.logTable = logTable;
}
/**
* @return Returns the arguments.
*/
public String[] getArguments()
{
return arguments;
}
/**
* @param arguments The arguments to set.
*/
public void setArguments(String[] arguments)
{
this.arguments = arguments;
}
/**
* Get a list of all the strings used in this job.
*
* @return A list of StringSearchResult with strings used in the job
*/
public List<StringSearchResult> getStringList(boolean searchSteps, boolean searchDatabases, boolean searchNotes)
{
List<StringSearchResult> stringList = new ArrayList<StringSearchResult>();
if (searchSteps)
{
// Loop over all job entries in the job and see what strings are used...
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entryMeta = getJobEntry(i);
stringList.add(new StringSearchResult(entryMeta.getName(), entryMeta, this, Messages.getString("JobMeta.SearchMetadata.JobEntryName"))); //$NON-NLS-1$
if (entryMeta.getDescription() != null)
stringList.add(new StringSearchResult(entryMeta.getDescription(), entryMeta, this, Messages.getString("JobMeta.SearchMetadata.JobEntryDescription"))); //$NON-NLS-1$
JobEntryInterface metaInterface = entryMeta.getEntry();
StringSearcher.findMetaData(metaInterface, 1, stringList, entryMeta, this);
}
}
// Loop over all database connections and see what strings are used...
if (searchDatabases)
{
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta meta = getDatabase(i);
stringList.add(new StringSearchResult(meta.getName(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseConnectionName"))); //$NON-NLS-1$
if (meta.getDatabaseName() != null) stringList.add(new StringSearchResult(meta.getDatabaseName(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseName"))); //$NON-NLS-1$
if (meta.getUsername() != null) stringList.add(new StringSearchResult(meta.getUsername(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseUsername"))); //$NON-NLS-1$
if (meta.getDatabaseTypeDesc() != null)
stringList.add(new StringSearchResult(meta.getDatabaseTypeDesc(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseTypeDescription"))); //$NON-NLS-1$
if (meta.getDatabasePortNumberString() != null)
stringList.add(new StringSearchResult(meta.getDatabasePortNumberString(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabasePort"))); //$NON-NLS-1$
}
}
// Loop over all notes and see what strings are used...
if (searchNotes)
{
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta meta = getNote(i);
if (meta.getNote() != null) stringList.add(new StringSearchResult(meta.getNote(), meta, this, Messages.getString("JobMeta.SearchMetadata.NotepadText"))); //$NON-NLS-1$
}
}
return stringList;
}
public List<String> getUsedVariables()
{
// Get the list of Strings.
List<StringSearchResult> stringList = getStringList(true, true, false);
List<String> varList = new ArrayList<String>();
// Look around in the strings, see what we find...
for (StringSearchResult result : stringList)
{
StringUtil.getUsedVariables(result.getString(), varList, false);
}
return varList;
}
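/*
 * Usage sketch: list every ${VARIABLE} referenced anywhere in the job entries and connections.
 * The handling of each name is illustrative.
 *
 *   for (String variableName : jobMeta.getUsedVariables())
 *   {
 *       // e.g. resolve it: jobMeta.environmentSubstitute("${" + variableName + "}")
 *   }
 */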
/**
* Get an array of all the selected job entries
*
* @return A list containing all the selected & drawn job entries.
*/
public List<GUIPositionInterface> getSelectedDrawnJobEntryList()
{
List<GUIPositionInterface> list = new ArrayList<GUIPositionInterface>();
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jobEntryCopy = getJobEntry(i);
if (jobEntryCopy.isDrawn() && jobEntryCopy.isSelected())
{
list.add( jobEntryCopy);
}
}
return list;
}
public boolean haveConnectionsChanged()
{
if (changed_databases) return true;
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta ci = getDatabase(i);
if (ci.hasChanged()) return true;
}
return false;
}
public boolean haveJobEntriesChanged()
{
if (changed_entries) return true;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entry = getJobEntry(i);
if (entry.hasChanged()) return true;
}
return false;
}
public boolean haveJobHopsChanged()
{
if (changed_hops) return true;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.hasChanged()) return true;
}
return false;
}
public boolean haveNotesChanged()
{
if (changed_notes) return true;
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta note = getNote(i);
if (note.hasChanged()) return true;
}
return false;
}
/**
* @return the sharedObjectsFile
*/
public String getSharedObjectsFile()
{
return sharedObjectsFile;
}
/**
* @param sharedObjectsFile the sharedObjectsFile to set
*/
public void setSharedObjectsFile(String sharedObjectsFile)
{
this.sharedObjectsFile = sharedObjectsFile;
}
/**
* @param modifiedUser The modifiedUser to set.
*/
public void setModifiedUser(String modified_User)
{
modifiedUser = modified_User;
}
/**
* @return Returns the modifiedUser.
*/
public String getModifiedUser()
{
return modifiedUser;
}
/**
* @param modifiedDate The modifiedDate to set.
*/
public void setModifiedDate(Date modified_Date)
{
modifiedDate = modified_Date;
}
/**
* @return Returns the modifiedDate.
*/
public Date getModifiedDate()
{
return modifiedDate;
}
/**
* @return The description of the job
*/
public String getDescription()
{
return description;
}
/**
* @return The extended description of the job
*/
public String getExtendedDescription()
{
return extended_description;
}
/**
* @return The version of the job
*/
public String getJobversion()
{
return job_version;
}
/**
* Get the status of the job
*/
public int getJobstatus()
{
return job_status;
}
/**
* Set the description of the job.
*
* @param n The new description of the job
*/
public void setDescription(String n)
{
description = n;
}
/**
* Set the description of the job.
*
* @param n The new extended description of the job
*/
public void setExtendedDescription(String n)
{
extended_description = n;
}
/**
* Set the version of the job.
*
* @param n The new version description of the job
*/
public void setJobversion(String n)
{
job_version = n;
}
/**
* Set the status of the job.
*
* @param n The new status description of the job
*/
public void setJobstatus(int n)
{
job_status = n;
}
/**
* @return Returns the createdDate.
*/
public Date getCreatedDate()
{
return created_date;
}
/**
* @param createdDate The createdDate to set.
*/
public void setCreatedDate(Date createddate)
{
created_date = createddate;
}
/**
* @param createdUser The createdUser to set.
*/
public void setCreatedUser(String createduser)
{
created_user = createduser;
}
/**
* @return Returns the createdUser.
*/
public String getCreatedUser()
{
return created_user;
}
/**
* Find a jobentry with a certain ID in a list of job entries.
* @param jobentries The List of jobentries
* @param id_jobentry The id of the jobentry
* @return The JobEntry object if one was found, null otherwise.
*/
public static final JobEntryInterface findJobEntry(List<JobEntryInterface> jobentries, long id_jobentry)
{
if (jobentries == null)
return null;
for (JobEntryInterface je : jobentries)
{
if (je.getID() == id_jobentry) {
return je;
}
}
return null;
}
/**
* Find a jobentrycopy with a certain ID in a list of job entry copies.
* @param jobcopies The List of jobentry copies
* @param id_jobentry_copy The id of the jobentry copy
* @return The JobEntryCopy object if one was found, null otherwise.
*/
public static final JobEntryCopy findJobEntryCopy(List<JobEntryCopy> jobcopies, long id_jobentry_copy)
{
if (jobcopies == null)
return null;
for (JobEntryCopy jec : jobcopies)
{
if (jec.getID() == id_jobentry_copy) {
return jec;
}
}
return null;
}
/**
* Calls setInternalKettleVariables on the default object.
*/
public void setInternalKettleVariables()
{
setInternalKettleVariables(variables);
}
/**
* This method sets various internal kettle variables that can be used by the job.
*/
public void setInternalKettleVariables(VariableSpace var)
{
if (filename!=null) // we have a filename that's defined.
{
try
{
FileObject fileObject = KettleVFS.getFileObject(filename);
FileName fileName = fileObject.getName();
// The filename of the job
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName());
// The directory of the job
FileName fileDir = fileName.getParent();
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI());
}
catch(IOException e)
{
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "");
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, "");
}
}
else
{
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); //$NON-NLS-1$
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); //$NON-NLS-1$
}
// The name of the job
var.setVariable(Const.INTERNAL_VARIABLE_JOB_NAME, Const.NVL(name, "")); //$NON-NLS-1$
// The name of the directory in the repository
var.setVariable(Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, directory != null ? directory.getPath() : ""); //$NON-NLS-1$
// Undefine the transformation-specific variables:
// a job is not a transformation, so any value in these would be misleading.
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, null);
}
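/*
 * Usage sketch: once the internal variables are set they resolve like any other variable.
 * The fallback value is illustrative.
 *
 *   jobMeta.setInternalKettleVariables();
 *   String jobName = jobMeta.getVariable(Const.INTERNAL_VARIABLE_JOB_NAME, "");
 */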
public void copyVariablesFrom(VariableSpace space) {
variables.copyVariablesFrom(space);
}
public String environmentSubstitute(String aString)
{
return variables.environmentSubstitute(aString);
}
public String[] environmentSubstitute(String aString[])
{
return variables.environmentSubstitute(aString);
}
public VariableSpace getParentVariableSpace()
{
return variables.getParentVariableSpace();
}
public void setParentVariableSpace(VariableSpace parent)
{
variables.setParentVariableSpace(parent);
}
public String getVariable(String variableName, String defaultValue)
{
return variables.getVariable(variableName, defaultValue);
}
public String getVariable(String variableName)
{
return variables.getVariable(variableName);
}
public boolean getBooleanValueOfVariable(String variableName, boolean defaultValue) {
if (!Const.isEmpty(variableName))
{
String value = environmentSubstitute(variableName);
if (!Const.isEmpty(value))
{
return ValueMeta.convertStringToBoolean(value);
}
}
return defaultValue;
}
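/*
 * Usage sketch: read a boolean flag that may be expressed as a variable. The variable name is
 * illustrative; "Y"/"TRUE"/"1" style values are converted by ValueMeta.convertStringToBoolean().
 *
 *   boolean skipChecks = jobMeta.getBooleanValueOfVariable("${SKIP_CHECKS}", false);
 */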
public void initializeVariablesFrom(VariableSpace parent)
{
variables.initializeVariablesFrom(parent);
}
public String[] listVariables()
{
return variables.listVariables();
}
public void setVariable(String variableName, String variableValue)
{
variables.setVariable(variableName, variableValue);
}
public void shareVariablesWith(VariableSpace space)
{
variables = space;
}
public void injectVariables(Map<String,String> prop)
{
variables.injectVariables(prop);
}
/**
* Check all job entries within the job. Each Job Entry has the opportunity to
* check their own settings.
* @param remarks List of CheckResult remarks inserted into by each JobEntry
* @param only_selected true if you only want to check the selected jobs
* @param monitor Progress monitor (not presently in use)
*/
public void checkJobEntries(List<CheckResultInterface> remarks, boolean only_selected, IProgressMonitor monitor) {
remarks.clear(); // Empty remarks
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.VerifyingThisJobEntryTask.Title"), jobcopies.size() + 2); //$NON-NLS-1$
boolean stop_checking = false;
for (int i=0; i<jobcopies.size() && !stop_checking; i++) {
JobEntryCopy copy = jobcopies.get(i); // get the job entry copy
if ( (!only_selected) || (only_selected && copy.isSelected()) ) {
JobEntryInterface entry = copy.getEntry();
if (entry != null) {
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.VerifyingJobEntry.Title",entry.getName())); //$NON-NLS-1$ //$NON-NLS-2$
entry.check(remarks, this);
if (monitor != null) {
monitor.worked(1); // progress bar...
if (monitor.isCanceled()) {
stop_checking = true;
}
}
}
}
if (monitor != null) {
monitor.worked(1);
}
}
if (monitor != null) {
monitor.done();
}
}
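/*
 * Usage sketch: validate every entry in the job and summarise the remarks. The CheckResult
 * accessors (getType(), getText()) and TYPE_RESULT_ERROR are assumed from the usual contract.
 *
 *   List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>();
 *   jobMeta.checkJobEntries(remarks, false, null); // false: check all entries, not only selected ones
 *   for (CheckResultInterface remark : remarks)
 *   {
 *       if (remark.getType() == CheckResultInterface.TYPE_RESULT_ERROR)
 *       {
 *           // surface remark.getText() to the user
 *       }
 *   }
 */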
public List<ResourceReference> getResourceDependencies() {
List<ResourceReference> resourceReferences = new ArrayList<ResourceReference>();
JobEntryCopy copy = null;
JobEntryInterface entry = null;
for (int i=0;i<jobcopies.size();i++) {
copy = jobcopies.get(i); // get the job entry copy
entry = copy.getEntry();
resourceReferences.addAll( entry.getResourceDependencies(this) );
}
return resourceReferences;
}
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface) throws KettleException {
try {
FileObject fileObject = KettleVFS.getFileObject(getFilename());
String name = namingInterface.nameResource(fileObject.getName().getBaseName(), fileObject.getParent().getName().getPath(), "kjb");
ResourceDefinition definition = definitions.get(name);
if (definition==null) {
// If we do this once, it will be plenty :-)
//
JobMeta jobMeta = (JobMeta) this.realClone(false);
// Add used resources, modify the cloned jobMeta accordingly.
// Go through the list of job entries; each entry may adjust the cloned JobMeta.
// At the end we make a new XML version of it in "exported" format...
// Loop over the entries; databases will be exported to XML anyway.
//
for (JobEntryCopy jobEntry: jobMeta.jobcopies) {
jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface);
}
// At the end, add ourselves to the map...
//
String jobMetaContent = jobMeta.getXML();
definition = new ResourceDefinition(name, jobMetaContent);
definitions.put(name, definition); // keep the key consistent with the definitions.get(name) lookup above
}
} catch (FileSystemException e) {
throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
} catch (IOException e) {
throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
}
return filename;
}
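/*
 * Usage sketch: export this job and the resources its entries reference into a map of
 * definitions. JobMeta itself is a VariableSpace, so it can be passed as the space; the
 * naming interface instance is assumed to be supplied by the caller.
 *
 *   Map<String, ResourceDefinition> definitions = new java.util.HashMap<String, ResourceDefinition>();
 *   String exportedFilename = jobMeta.exportResources(jobMeta, definitions, namingInterface);
 *   // definitions now maps resource names to their exported XML content
 */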
/**
* @return the slaveServer list
*/
public List<SlaveServer> getSlaveServers() {
return slaveServers;
}
/**
* @param slaveServers the slaveServers to set
*/
public void setSlaveServers(List<SlaveServer> slaveServers) {
this.slaveServers = slaveServers;
}
/**
* Find a slave server using the name
* @param serverString the name of the slave server
* @return the slave server or null if we couldn't spot an appropriate entry.
*/
public SlaveServer findSlaveServer(String serverString)
{
return SlaveServer.findSlaveServer(slaveServers, serverString);
}
/**
* @return An array of slave server names
*/
public String[] getSlaveServerNames()
{
return SlaveServer.getSlaveServerNames(slaveServers);
}
}
| src/org/pentaho/di/job/JobMeta.java | /**********************************************************************
** **
** This code belongs to the KETTLE project. **
** **
** Kettle, from version 2.2 on, is released into the public domain **
** under the Lesser GNU Public License (LGPL). **
** **
** For more details, please read the document LICENSE.txt, included **
** in this project **
** **
** http://www.kettle.be **
** [email protected] **
** **
**********************************************************************/
package org.pentaho.di.job;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.apache.commons.vfs.FileName;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.DBCache;
import org.pentaho.di.core.EngineMetaInterface;
import org.pentaho.di.core.LastUsedFile;
import org.pentaho.di.core.NotePadMeta;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.changed.ChangedFlagInterface;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.gui.GUIPositionInterface;
import org.pentaho.di.core.gui.OverwritePrompter;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.gui.UndoInterface;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.reflection.StringSearchResult;
import org.pentaho.di.core.reflection.StringSearcher;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.undo.TransAction;
import org.pentaho.di.core.util.StringUtil;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.core.xml.XMLInterface;
import org.pentaho.di.job.entries.special.JobEntrySpecial;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceExportInterface;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.shared.SharedObjects;
import org.pentaho.di.trans.HasDatabasesInterface;
import org.pentaho.di.trans.HasSlaveServersInterface;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/**
* Defines a Job and provides methods to load, save, verify, etc.
*
* @author Matt
* @since 11-08-2003
*
*/
public class JobMeta implements Cloneable, Comparable<JobMeta>, XMLInterface, UndoInterface,
HasDatabasesInterface, ChangedFlagInterface,
VariableSpace, EngineMetaInterface,
ResourceExportInterface, HasSlaveServersInterface
{
public static final String XML_TAG = "job"; //$NON-NLS-1$
private static final String XML_TAG_SLAVESERVERS = "slaveservers"; //$NON-NLS-1$
public LogWriter log;
protected long id;
protected String name;
protected String description;
protected String extended_description;
protected String job_version;
protected int job_status;
protected String filename;
public List<JobEntryInterface> jobentries;
public List<JobEntryCopy> jobcopies;
public List<JobHopMeta> jobhops;
public List<NotePadMeta> notes;
public List<DatabaseMeta> databases;
private List<SlaveServer> slaveServers;
protected RepositoryDirectory directory;
protected String arguments[];
protected boolean changed, changed_entries, changed_hops, changed_notes, changed_databases;
protected DatabaseMeta logconnection;
protected String logTable;
public DBCache dbcache;
protected List<TransAction> undo;
private VariableSpace variables = new Variables();
protected int max_undo;
protected int undo_position;
public static final int TYPE_UNDO_CHANGE = 1;
public static final int TYPE_UNDO_NEW = 2;
public static final int TYPE_UNDO_DELETE = 3;
public static final int TYPE_UNDO_POSITION = 4;
public static final String STRING_SPECIAL = "SPECIAL"; //$NON-NLS-1$
public static final String STRING_SPECIAL_START = "START"; //$NON-NLS-1$
public static final String STRING_SPECIAL_DUMMY = "DUMMY"; //$NON-NLS-1$
public static final String STRING_SPECIAL_OK = "OK"; //$NON-NLS-1$
public static final String STRING_SPECIAL_ERROR = "ERROR"; //$NON-NLS-1$
// Remember the size and position of the different windows...
public boolean max[] = new boolean[1];
public String created_user, modifiedUser;
public Date created_date, modifiedDate;
protected boolean useBatchId;
protected boolean batchIdPassed;
protected boolean logfieldUsed;
/** If this is null, we load from the default shared objects file : $KETTLE_HOME/.kettle/shared.xml */
protected String sharedObjectsFile;
public JobMeta(LogWriter l)
{
log = l;
clear();
initializeVariablesFrom(null);
}
public long getID()
{
return id;
}
public void setID(long id)
{
this.id = id;
}
public void clear()
{
name = null;
jobcopies = new ArrayList<JobEntryCopy>();
jobentries = new ArrayList<JobEntryInterface>();
jobhops = new ArrayList<JobHopMeta>();
notes = new ArrayList<NotePadMeta>();
databases = new ArrayList<DatabaseMeta>();
slaveServers = new ArrayList<SlaveServer>();
logconnection = null;
logTable = null;
arguments = null;
max_undo = Const.MAX_UNDO;
dbcache = DBCache.getInstance();
undo = new ArrayList<TransAction>();
undo_position = -1;
addDefaults();
setChanged(false);
created_user = "-"; //$NON-NLS-1$
created_date = new Date();
modifiedUser = "-"; //$NON-NLS-1$
modifiedDate = new Date();
directory = new RepositoryDirectory();
description=null;
job_status=-1;
job_version=null;
extended_description=null;
useBatchId=true;
logfieldUsed=true;
// setInternalKettleVariables(); Don't clear the internal variables for ad-hoc jobs, it ruins the previews
// etc.
}
public void addDefaults()
{
/*
addStart(); // Add starting point!
addDummy(); // Add dummy!
addOK(); // errors == 0 evaluation
addError(); // errors != 0 evaluation
*/
clearChanged();
}
public static final JobEntryCopy createStartEntry()
{
JobEntrySpecial jobEntrySpecial = new JobEntrySpecial(STRING_SPECIAL_START, true, false);
JobEntryCopy jobEntry = new JobEntryCopy();
jobEntry.setID(-1L);
jobEntry.setEntry(jobEntrySpecial);
jobEntry.setLocation(50, 50);
jobEntry.setDrawn(false);
jobEntry.setDescription(Messages.getString("JobMeta.StartJobEntry.Description")); //$NON-NLS-1$
return jobEntry;
}
public static final JobEntryCopy createDummyEntry()
{
JobEntrySpecial jobEntrySpecial = new JobEntrySpecial(STRING_SPECIAL_DUMMY, false, true);
JobEntryCopy jobEntry = new JobEntryCopy();
jobEntry.setID(-1L);
jobEntry.setEntry(jobEntrySpecial);
jobEntry.setLocation(50, 50);
jobEntry.setDrawn(false);
jobEntry.setDescription(Messages.getString("JobMeta.DummyJobEntry.Description")); //$NON-NLS-1$
return jobEntry;
}
public JobEntryCopy getStart()
{
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy cge = getJobEntry(i);
if (cge.isStart()) return cge;
}
return null;
}
public JobEntryCopy getDummy()
{
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy cge = getJobEntry(i);
if (cge.isDummy()) return cge;
}
return null;
}
/**
* Compares two jobs on name, then on filename.
*/
public int compare(JobMeta t1, JobMeta t2)
{
if (Const.isEmpty(t1.getName()) && !Const.isEmpty(t2.getName())) return -1;
if (!Const.isEmpty(t1.getName()) && Const.isEmpty(t2.getName())) return 1;
if (Const.isEmpty(t1.getName()) && Const.isEmpty(t2.getName()))
{
if (Const.isEmpty(t1.getFilename()) && !Const.isEmpty(t2.getFilename())) return -1;
if (!Const.isEmpty(t1.getFilename()) && Const.isEmpty(t2.getFilename())) return 1;
if (Const.isEmpty(t1.getFilename()) && Const.isEmpty(t2.getFilename())) { return 0; }
return t1.getFilename().compareTo(t2.getFilename());
}
return t1.getName().compareTo(t2.getName());
}
public int compareTo(JobMeta o)
{
return compare(this, o);
}
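/*
 * Usage sketch: because compareTo() orders jobs by name and falls back to the filename,
 * a collection of loaded jobs can be sorted directly ("loadedJobs" is an illustrative List<JobMeta>).
 *
 *   java.util.Collections.sort(loadedJobs); // uses compare(this, o) defined above
 */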
public boolean equals(Object obj)
{
if (!(obj instanceof JobMeta))
return false;
return compare(this,(JobMeta) obj) == 0;
}
public Object clone()
{
return realClone(true);
}
public Object realClone(boolean doClear)
{
try
{
JobMeta jobMeta = (JobMeta) super.clone();
if (doClear) {
jobMeta.clear();
} else {
jobMeta.jobcopies = new ArrayList<JobEntryCopy>();
jobMeta.jobentries = new ArrayList<JobEntryInterface>();
jobMeta.jobhops = new ArrayList<JobHopMeta>();
jobMeta.notes = new ArrayList<NotePadMeta>();
jobMeta.databases = new ArrayList<DatabaseMeta>();
jobMeta.slaveServers = new ArrayList<SlaveServer>();
}
for (JobEntryInterface entry : jobentries) jobMeta.jobentries.add((JobEntryInterface)entry.clone());
for (JobEntryCopy entry : jobcopies) jobMeta.jobcopies.add((JobEntryCopy)entry.clone_deep());
for (JobHopMeta entry : jobhops) jobMeta.jobhops.add((JobHopMeta)entry.clone());
for (NotePadMeta entry : notes) jobMeta.notes.add((NotePadMeta)entry.clone());
for (DatabaseMeta entry : databases) jobMeta.databases.add((DatabaseMeta)entry.clone());
for (SlaveServer slave : slaveServers) jobMeta.getSlaveServers().add((SlaveServer)slave.clone());
return jobMeta;
}
catch (CloneNotSupportedException e)
{
return null;
}
}
public String getName()
{
return name;
}
public void setName(String name)
{
this.name = name;
setInternalKettleVariables();
}
/**
* Builds a name from the filename, if no name has been set yet
*/
public void nameFromFilename()
{
if (!Const.isEmpty(filename))
{
name = Const.createName(filename);
}
}
/**
* @return Returns the directory.
*/
public RepositoryDirectory getDirectory()
{
return directory;
}
/**
* @param directory The directory to set.
*/
public void setDirectory(RepositoryDirectory directory)
{
this.directory = directory;
setInternalKettleVariables();
}
public String getFilename()
{
return filename;
}
public void setFilename(String filename)
{
this.filename = filename;
setInternalKettleVariables();
}
public DatabaseMeta getLogConnection()
{
return logconnection;
}
public void setLogConnection(DatabaseMeta ci)
{
logconnection = ci;
}
/**
* @return Returns the databases.
*/
public List<DatabaseMeta> getDatabases()
{
return databases;
}
/**
* @param databases The databases to set.
*/
public void setDatabases(List<DatabaseMeta> databases)
{
this.databases = databases;
}
public void setChanged()
{
setChanged(true);
}
public void setChanged(boolean ch)
{
changed = ch;
}
public void clearChanged()
{
changed_entries = false;
changed_hops = false;
changed_notes = false;
changed_databases = false;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entry = getJobEntry(i);
entry.setChanged(false);
}
for (JobHopMeta hi:jobhops) // Look at all the hops
{
hi.setChanged(false);
}
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta db = getDatabase(i);
db.setChanged(false);
}
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta note = getNote(i);
note.setChanged(false);
}
changed = false;
}
public boolean hasChanged()
{
if (changed) return true;
if (haveJobEntriesChanged()) return true;
if (haveJobHopsChanged()) return true;
if (haveConnectionsChanged()) return true;
if (haveNotesChanged()) return true;
return false;
}
protected void saveRepJob(Repository rep) throws KettleException
{
try
{
// The ID has to be assigned, even when it's a new item...
rep.insertJob(getID(), directory.getID(), getName(), logconnection == null ? -1 : logconnection.getID(), logTable, modifiedUser,
modifiedDate, useBatchId, batchIdPassed, logfieldUsed, sharedObjectsFile,description,extended_description,job_version,
job_status, created_user,created_date);
}
catch (KettleDatabaseException dbe)
{
throw new KettleException(Messages.getString("JobMeta.Exception.UnableToSaveJobToRepository"), dbe); //$NON-NLS-1$
}
}
public boolean showReplaceWarning(Repository rep)
{
if (getID() < 0)
{
try
{
if (rep.getJobID(getName(), directory.getID()) > 0) return true;
}
catch (KettleException dbe)
{
return true;
}
}
return false;
}
/**
* This method asks all job entries in the job whether or not the specified database connection is used.
* The connection is used in the job if any of the entries uses it or if it is being used to log to.
* @param databaseMeta The connection to check
* @return true if the connection is used in this job.
*/
public boolean isDatabaseConnectionUsed(DatabaseMeta databaseMeta)
{
for (int i=0;i<nrJobEntries();i++)
{
JobEntryCopy jobEntry = getJobEntry(i);
DatabaseMeta dbs[] = jobEntry.getEntry().getUsedDatabaseConnections();
for (int d=0;d<dbs.length;d++)
{
if (dbs[d]!=null && dbs[d].equals(databaseMeta)) return true;
}
}
if (logconnection!=null && logconnection.equals(databaseMeta)) return true;
return false;
}
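/*
 * Usage sketch: warn before removing a connection that is still referenced by an entry or by
 * the log settings. The connection name and the resulting action are illustrative.
 *
 *   DatabaseMeta candidate = jobMeta.findDatabase("Logging DB");
 *   if (candidate != null && jobMeta.isDatabaseConnectionUsed(candidate))
 *   {
 *       // refuse the delete, or ask the user first
 *   }
 */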
public String getFileType() {
return LastUsedFile.FILE_TYPE_JOB;
}
public String[] getFilterNames() {
return Const.getJobFilterNames();
}
public String[] getFilterExtensions() {
return Const.STRING_JOB_FILTER_EXT;
}
public String getDefaultExtension() {
return Const.STRING_JOB_DEFAULT_EXT;
}
public String getXML()
{
Props props = null;
if (Props.isInitialized()) props=Props.getInstance();
DatabaseMeta ci = getLogConnection();
StringBuffer retval = new StringBuffer(500);
retval.append("<").append(XML_TAG).append(">").append(Const.CR); //$NON-NLS-1$
retval.append(" ").append(XMLHandler.addTagValue("name", getName())); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("description", description)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("extended_description", extended_description));
retval.append(" ").append(XMLHandler.addTagValue("job_version", job_version));
if ( job_status >= 0 )
{
retval.append(" ").append(XMLHandler.addTagValue("job_status", job_status));
}
retval.append(" ").append(XMLHandler.addTagValue("directory", directory.getPath())); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("created_user", created_user)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("created_date", XMLHandler.date2string(created_date))); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
retval.append(" ").append(XMLHandler.addTagValue("modified_user", modifiedUser)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("modified_date", XMLHandler.date2string(modifiedDate))); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
// Let's add the last known file location if we have any...
//
if (!Const.isEmpty(filename)) {
retval.append(" ").append(XMLHandler.addTagValue("filename", filename)); //$NON-NLS-1$ //$NON-NLS-2$
}
// Save the database connections...
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta dbMeta = getDatabase(i);
if (props!=null && props.areOnlyUsedConnectionsSavedToXML())
{
if (isDatabaseConnectionUsed(dbMeta))
{
retval.append(dbMeta.getXML());
}
}
else
{
retval.append(dbMeta.getXML());
}
}
// The slave servers...
//
retval.append(" ").append(XMLHandler.openTag(XML_TAG_SLAVESERVERS)).append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < slaveServers.size(); i++)
{
SlaveServer slaveServer = slaveServers.get(i);
retval.append(" ").append(slaveServer.getXML()).append(Const.CR);
}
retval.append(" ").append(XMLHandler.closeTag(XML_TAG_SLAVESERVERS)).append(Const.CR); //$NON-NLS-1$
retval.append(" ").append(XMLHandler.addTagValue("logconnection", ci == null ? "" : ci.getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
retval.append(" ").append(XMLHandler.addTagValue("logtable", logTable)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("use_batchid", useBatchId)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("pass_batchid", batchIdPassed)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("use_logfield", logfieldUsed)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("shared_objects_file", sharedObjectsFile)); // $NON-NLS-1$
retval.append(" <entries>").append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jge = getJobEntry(i);
retval.append(jge.getXML());
}
retval.append(" </entries>").append(Const.CR); //$NON-NLS-1$
retval.append(" <hops>").append(Const.CR); //$NON-NLS-1$
for (JobHopMeta hi:jobhops) // Look at all the hops
{
retval.append(hi.getXML());
}
retval.append(" </hops>").append(Const.CR); //$NON-NLS-1$
retval.append(" <notepads>").append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta ni = getNote(i);
retval.append(ni.getXML());
}
retval.append(" </notepads>").append(Const.CR); //$NON-NLS-1$
retval.append("</").append(XML_TAG).append(">").append(Const.CR); //$NON-NLS-1$
return retval.toString();
}
public JobMeta(LogWriter log, String fname, Repository rep) throws KettleXMLException
{
this(log, null, fname, rep, null);
}
public JobMeta(LogWriter log, String fname, Repository rep, OverwritePrompter prompter) throws KettleXMLException
{
this(log, null, fname, rep, prompter);
}
/**
* Load the job from the XML file specified.
*
* @param log the logging channel
* @param fname The filename to load as a job
* @param rep The repository to bind against, null if there is no repository available.
* @throws KettleXMLException
*/
public JobMeta(LogWriter log, VariableSpace parentSpace, String fname, Repository rep, OverwritePrompter prompter) throws KettleXMLException
{
this.log = log;
this.initializeVariablesFrom(parentSpace);
try
{
// OK, try to load using the VFS stuff...
Document doc = XMLHandler.loadXMLFile(KettleVFS.getFileObject(fname));
if (doc != null)
{
// Clear the job
clear();
// The jobnode
Node jobnode = XMLHandler.getSubNode(doc, XML_TAG);
loadXML(jobnode, rep, prompter);
// Do this at the end
setFilename(fname);
}
else
{
throw new KettleXMLException(Messages.getString("JobMeta.Exception.ErrorReadingFromXMLFile") + fname); //$NON-NLS-1$
}
}
catch (Exception e)
{
throw new KettleXMLException(Messages.getString("JobMeta.Exception.UnableToLoadJobFromXMLFile") + fname + "]", e); //$NON-NLS-1$ //$NON-NLS-2$
}
}
public JobMeta(LogWriter log, Node jobnode, Repository rep, OverwritePrompter prompter) throws KettleXMLException
{
this.log = log;
loadXML(jobnode, rep, prompter);
}
public boolean isRepReference() {
return isRepReference(getFilename(), this.getName());
}
public boolean isFileReference() {
return !isRepReference(getFilename(), this.getName());
}
public static boolean isRepReference(String fileName, String transName) {
return Const.isEmpty(fileName) && !Const.isEmpty(transName);
}
public static boolean isFileReference(String fileName, String transName) {
return !isRepReference(fileName, transName);
}
public void loadXML(Node jobnode, Repository rep, OverwritePrompter prompter ) throws KettleXMLException
{
Props props = null;
if (Props.isInitialized()) props = Props.getInstance();
try
{
// clear the jobs;
clear();
//
// get job info:
//
name = XMLHandler.getTagValue(jobnode, "name"); //$NON-NLS-1$
// description
description = XMLHandler.getTagValue(jobnode, "description");
// extended description
extended_description = XMLHandler.getTagValue(jobnode, "extended_description");
// job version
job_version = XMLHandler.getTagValue(jobnode, "job_version");
// job status
job_status = Const.toInt(XMLHandler.getTagValue(jobnode, "job_status"),-1);
// Created user/date
created_user = XMLHandler.getTagValue(jobnode, "created_user"); //$NON-NLS-1$
String createDate = XMLHandler.getTagValue(jobnode, "created_date"); //$NON-NLS-1$
if (createDate != null)
{
created_date = XMLHandler.stringToDate(createDate);
}
// Changed user/date
modifiedUser = XMLHandler.getTagValue(jobnode, "modified_user"); //$NON-NLS-1$
String modDate = XMLHandler.getTagValue(jobnode, "modified_date"); //$NON-NLS-1$
if (modDate != null)
{
modifiedDate = XMLHandler.stringToDate(modDate);
}
// Also load and set the filename
//
filename = XMLHandler.getTagValue(jobnode, "filename"); //$NON-NLS-1$
// Load the default list of databases
// Read objects from the shared XML file & the repository
try
{
sharedObjectsFile = XMLHandler.getTagValue(jobnode, "shared_objects_file"); //$NON-NLS-1$ //$NON-NLS-2$
readSharedObjects(rep);
}
catch(Exception e)
{
LogWriter.getInstance().logError(toString(), Messages.getString("JobMeta.ErrorReadingSharedObjects.Message", e.toString())); // $NON-NLS-1$ //$NON-NLS-1$
LogWriter.getInstance().logError(toString(), Const.getStackTracker(e));
}
//
// Read the database connections
//
int nr = XMLHandler.countNodes(jobnode, "connection"); //$NON-NLS-1$
for (int i = 0; i < nr; i++)
{
Node dbnode = XMLHandler.getSubNodeByNr(jobnode, "connection", i); //$NON-NLS-1$
DatabaseMeta dbcon = new DatabaseMeta(dbnode);
DatabaseMeta exist = findDatabase(dbcon.getName());
if (exist == null)
{
addDatabase(dbcon);
}
else
{
boolean askOverwrite = Props.isInitialized() ? props.askAboutReplacingDatabaseConnections() : false;
boolean overwrite = Props.isInitialized() ? props.replaceExistingDatabaseConnections() : true;
if (askOverwrite && prompter != null)
{
overwrite = prompter.overwritePrompt(
Messages.getString("JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName() ),
Messages.getString("JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage"),
Props.STRING_ASK_ABOUT_REPLACING_DATABASES);
}
if (overwrite)
{
int idx = indexOfDatabase(exist);
removeDatabase(idx);
addDatabase(idx, dbcon);
}
}
}
// Read the slave servers...
//
Node slaveServersNode = XMLHandler.getSubNode(jobnode, XML_TAG_SLAVESERVERS); //$NON-NLS-1$
int nrSlaveServers = XMLHandler.countNodes(slaveServersNode, SlaveServer.XML_TAG); //$NON-NLS-1$
for (int i = 0 ; i < nrSlaveServers ; i++)
{
Node slaveServerNode = XMLHandler.getSubNodeByNr(slaveServersNode, SlaveServer.XML_TAG, i);
SlaveServer slaveServer = new SlaveServer(slaveServerNode);
// Check if the object exists and if it's a shared object.
// If so, then we will keep the shared version, not this one.
// The stored XML is only for backup purposes.
SlaveServer check = findSlaveServer(slaveServer.getName());
if (check!=null)
{
if (!check.isShared()) // we don't overwrite shared objects.
{
addOrReplaceSlaveServer(slaveServer);
}
}
else
{
slaveServers.add(slaveServer);
}
}
/*
* Get the log database connection & log table
*/
String logcon = XMLHandler.getTagValue(jobnode, "logconnection"); //$NON-NLS-1$
logconnection = findDatabase(logcon);
logTable = XMLHandler.getTagValue(jobnode, "logtable"); //$NON-NLS-1$
useBatchId = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_batchid")); //$NON-NLS-1$ //$NON-NLS-2$
batchIdPassed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "pass_batchid")); //$NON-NLS-1$ //$NON-NLS-2$
logfieldUsed = "Y".equalsIgnoreCase(XMLHandler.getTagValue(jobnode, "use_logfield")); //$NON-NLS-1$ //$NON-NLS-2$
/*
* read the job entries...
*/
Node entriesnode = XMLHandler.getSubNode(jobnode, "entries"); //$NON-NLS-1$
int tr = XMLHandler.countNodes(entriesnode, "entry"); //$NON-NLS-1$
for (int i = 0; i < tr; i++)
{
Node entrynode = XMLHandler.getSubNodeByNr(entriesnode, "entry", i); //$NON-NLS-1$
// System.out.println("Reading entry:\n"+entrynode);
JobEntryCopy je = new JobEntryCopy(entrynode, databases, slaveServers, rep);
JobEntryCopy prev = findJobEntry(je.getName(), 0, true);
if (prev != null)
{
if (je.getNr() == 0) // See if the #0 already exists!
{
// Replace previous version with this one: remove it first
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
}
else
if (je.getNr() > 0) // Use previously defined JobEntry info!
{
je.setEntry(prev.getEntry());
// See if entry already exists...
prev = findJobEntry(je.getName(), je.getNr(), true);
if (prev != null) // remove the old one!
{
int idx = indexOfJobEntry(prev);
removeJobEntry(idx);
}
}
}
// Add the JobEntryCopy...
addJobEntry(je);
}
Node hopsnode = XMLHandler.getSubNode(jobnode, "hops"); //$NON-NLS-1$
int ho = XMLHandler.countNodes(hopsnode, "hop"); //$NON-NLS-1$
for (int i = 0; i < ho; i++)
{
Node hopnode = XMLHandler.getSubNodeByNr(hopsnode, "hop", i); //$NON-NLS-1$
JobHopMeta hi = new JobHopMeta(hopnode, this);
jobhops.add(hi);
}
// Read the notes...
Node notepadsnode = XMLHandler.getSubNode(jobnode, "notepads"); //$NON-NLS-1$
int nrnotes = XMLHandler.countNodes(notepadsnode, "notepad"); //$NON-NLS-1$
for (int i = 0; i < nrnotes; i++)
{
Node notepadnode = XMLHandler.getSubNodeByNr(notepadsnode, "notepad", i); //$NON-NLS-1$
NotePadMeta ni = new NotePadMeta(notepadnode);
notes.add(ni);
}
clearChanged();
}
catch (Exception e)
{
throw new KettleXMLException(Messages.getString("JobMeta.Exception.UnableToLoadJobFromXMLNode"), e); //$NON-NLS-1$
}
finally
{
setInternalKettleVariables();
}
}
/**
* Read the database connections in the repository and add them to this job if they are not yet present.
*
* @param rep The repository to load the database connections from.
* @throws KettleException
*/
public void readDatabases(Repository rep) throws KettleException
{
readDatabases(rep, true);
}
/*
* (non-Javadoc)
*
* @see org.pentaho.di.trans.HasDatabaseInterface#readDatabases(org.pentaho.di.repository.Repository, boolean)
*/
public void readDatabases(Repository rep, boolean overWriteShared) throws KettleException
{
try
{
long dbids[] = rep.getDatabaseIDs();
for (int i = 0; i < dbids.length; i++)
{
DatabaseMeta databaseMeta = new DatabaseMeta(rep, dbids[i]);
DatabaseMeta check = findDatabase(databaseMeta.getName()); // Check if there already is one in the job
if (check == null || overWriteShared) // Add the connection; an existing one is only replaced when overWriteShared is set.
{
if (databaseMeta.getName() != null)
{
addOrReplaceDatabase(databaseMeta);
if (!overWriteShared) databaseMeta.setChanged(false);
}
}
}
setChanged(false);
}
catch (KettleDatabaseException dbe)
{
throw new KettleException(Messages.getString("JobMeta.Log.UnableToReadDatabaseIDSFromRepository"), dbe); //$NON-NLS-1$
}
catch (KettleException ke)
{
throw new KettleException(Messages.getString("JobMeta.Log.UnableToReadDatabasesFromRepository"), ke); //$NON-NLS-1$
}
}
public void readSharedObjects(Repository rep) throws KettleException
{
// Extract the shared steps, connections, etc. using the SharedObjects class
//
String soFile = environmentSubstitute(sharedObjectsFile);
SharedObjects sharedObjects = new SharedObjects(soFile);
Map<?, SharedObjectInterface> objectsMap = sharedObjects.getObjectsMap();
// First read the databases...
// We read databases & slaves first because there might be dependencies that need to be resolved.
//
for (SharedObjectInterface object : objectsMap.values())
{
if (object instanceof DatabaseMeta)
{
DatabaseMeta databaseMeta = (DatabaseMeta) object;
addOrReplaceDatabase(databaseMeta);
}
else if (object instanceof SlaveServer)
{
SlaveServer slaveServer = (SlaveServer) object;
addOrReplaceSlaveServer(slaveServer);
}
}
if (rep!=null)
{
readDatabases(rep, true);
}
}
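/*
 * Usage sketch: point the job at a specific shared objects file and (re)load the shared
 * connections and slave servers. The path is illustrative; rep may be null without a repository.
 *
 *   jobMeta.setSharedObjectsFile("/path/to/shared.xml"); // null falls back to the default shared.xml
 *   jobMeta.readSharedObjects(rep);
 */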
public boolean saveSharedObjects()
{
try
{
// First load all the shared objects...
String soFile = environmentSubstitute(sharedObjectsFile);
SharedObjects sharedObjects = new SharedObjects(soFile);
// Now overwrite the objects in there
List<Object> shared = new ArrayList<Object>();
shared.addAll(databases);
shared.addAll(slaveServers);
// The databases connections...
for (int i=0;i<shared.size();i++)
{
SharedObjectInterface sharedObject = (SharedObjectInterface) shared.get(i);
if (sharedObject.isShared())
{
sharedObjects.storeObject(sharedObject);
}
}
// Save the objects
sharedObjects.saveToFile();
return true;
}
catch(Exception e)
{
log.logError(toString(), "Unable to save shared objects: "+e.toString());
return false;
}
}
/**
* Find a database connection by it's name
*
* @param name The database name to look for
* @return The database connection or null if nothing was found.
*/
public DatabaseMeta findDatabase(String name)
{
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta ci = getDatabase(i);
if (ci.getName().equalsIgnoreCase(name)) { return ci; }
}
return null;
}
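/*
 * Usage sketch: the lookup is case-insensitive, so any casing of the stored connection name matches.
 * The name is illustrative.
 *
 *   DatabaseMeta logDb = jobMeta.findDatabase("logging db");
 *   if (logDb == null)
 *   {
 *       // no connection with that name is defined in this job
 *   }
 */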
public void saveRep(Repository rep) throws KettleException
{
saveRep(rep, null);
}
public void saveRep(Repository rep, IProgressMonitor monitor) throws KettleException
{
try
{
int nrWorks = 2 + nrDatabases() + nrNotes() + nrJobEntries() + nrJobHops();
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.SavingTransformation") + directory + Const.FILE_SEPARATOR + getName(), nrWorks); //$NON-NLS-1$
rep.lockRepository();
rep.insertLogEntry("save job '"+getName()+"'"); //$NON-NLS-1$ //$NON-NLS-2$
// Before we start, make sure we have a valid job ID!
// Two possibilities:
// 1) We have a ID: keep it
// 2) We don't have an ID: look it up.
// If we find a transformation with the same name: ask!
//
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.HandlingPreviousVersionOfJob")); //$NON-NLS-1$
setID(rep.getJobID(getName(), directory.getID()));
// If no valid id is available in the database, assign one...
if (getID() <= 0)
{
setID(rep.getNextJobID());
}
else
{
// If we have a valid ID, we need to make sure everything is cleared out
// of the database for this id_job, before we put it back in...
rep.delAllFromJob(getID());
}
if (monitor != null) monitor.worked(1);
// Now, save the job entry in R_JOB
// Note, we save this first so that we have an ID in the database.
// Everything else depends on this ID, including recursive job entries to the save job. (retry)
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingJobDetails")); //$NON-NLS-1$
log.logDetailed(toString(), "Saving job info to repository..."); //$NON-NLS-1$
saveRepJob(rep);
if (monitor != null) monitor.worked(1);
// Save the slaves
//
for (int i=0;i<slaveServers.size();i++)
{
SlaveServer slaveServer = slaveServers.get(i);
slaveServer.saveRep(rep, getID(), false);
}
//
// Save the notes
//
log.logDetailed(toString(), "Saving notes to repository..."); //$NON-NLS-1$
for (int i = 0; i < nrNotes(); i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingNoteNr") + (i + 1) + "/" + nrNotes()); //$NON-NLS-1$ //$NON-NLS-2$
NotePadMeta ni = getNote(i);
ni.saveRep(rep, getID());
if (ni.getID() > 0)
{
rep.insertJobNote(getID(), ni.getID());
}
if (monitor != null) monitor.worked(1);
}
//
// Save the job entries
//
log.logDetailed(toString(), "Saving " + nrJobEntries() + " Job enty copies to repository..."); //$NON-NLS-1$ //$NON-NLS-2$
rep.updateJobEntryTypes();
for (int i = 0; i < nrJobEntries(); i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.SavingJobEntryNr") + (i + 1) + "/" + nrJobEntries()); //$NON-NLS-1$ //$NON-NLS-2$
JobEntryCopy cge = getJobEntry(i);
cge.saveRep(rep, getID());
if (monitor != null) monitor.worked(1);
}
log.logDetailed(toString(), "Saving job hops to repository..."); //$NON-NLS-1$
for (int i = 0; i < nrJobHops(); i++)
{
if (monitor != null) monitor.subTask("Saving job hop #" + (i + 1) + "/" + nrJobHops()); //$NON-NLS-1$ //$NON-NLS-2$
JobHopMeta hi = getJobHop(i);
hi.saveRep(rep, getID());
if (monitor != null) monitor.worked(1);
}
// Commit this transaction!!
rep.commit();
clearChanged();
if (monitor != null) monitor.done();
}
catch (KettleDatabaseException dbe)
{
rep.rollback();
throw new KettleException(Messages.getString("JobMeta.Exception.UnableToSaveJobInRepositoryRollbackPerformed"), dbe); //$NON-NLS-1$
}
finally
{
// don't forget to unlock the repository.
// Normally this is done by the commit / rollback statement, but there are some freaky databases out
// there...
rep.unlockRepository();
}
}
/**
* Load a job in a directory
*
* @param log the logging channel
* @param rep The Repository
* @param jobname The name of the job
* @param repdir The directory in which the job resides.
* @throws KettleException
*/
public JobMeta(LogWriter log, Repository rep, String jobname, RepositoryDirectory repdir) throws KettleException
{
this(log, rep, jobname, repdir, null);
}
/**
* Load a job in a directory
*
* @param log the logging channel
* @param rep The Repository
* @param jobname The name of the job
* @param repdir The directory in which the job resides.
* @throws KettleException
*/
public JobMeta(LogWriter log, Repository rep, String jobname, RepositoryDirectory repdir, IProgressMonitor monitor) throws KettleException
{
this.log = log;
try
{
// Clear everything...
clear();
directory = repdir;
// Get the transformation id
setID(rep.getJobID(jobname, repdir.getID()));
// If no valid id is available in the database, then give error...
if (getID() > 0)
{
// Load the notes...
long noteids[] = rep.getJobNoteIDs(getID());
long jecids[] = rep.getJobEntryCopyIDs(getID());
long hopid[] = rep.getJobHopIDs(getID());
int nrWork = 2 + noteids.length + jecids.length + hopid.length;
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.LoadingJob") + repdir + Const.FILE_SEPARATOR + jobname, nrWork); //$NON-NLS-1$
//
// get job info:
//
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobInformation")); //$NON-NLS-1$
RowMetaAndData jobRow = rep.getJob(getID());
name = jobRow.getString("NAME", null); //$NON-NLS-1$
description = jobRow.getString("DESCRIPTION", null); //$NON-NLS-1$
extended_description = jobRow.getString("EXTENDED_DESCRIPTION", null); //$NON-NLS-1$
job_version = jobRow.getString("JOB_VERSION", null); //$NON-NLS-1$
job_status = Const.toInt(jobRow.getString("JOB_STATUS", null),-1); //$NON-NLS-1$
logTable = jobRow.getString("TABLE_NAME_LOG", null); //$NON-NLS-1$
created_user = jobRow.getString("CREATED_USER", null); //$NON-NLS-1$
created_date = jobRow.getDate("CREATED_DATE", new Date()); //$NON-NLS-1$
modifiedUser = jobRow.getString("MODIFIED_USER", null); //$NON-NLS-1$
modifiedDate = jobRow.getDate("MODIFIED_DATE", new Date()); //$NON-NLS-1$
long id_logdb = jobRow.getInteger("ID_DATABASE_LOG", 0); //$NON-NLS-1$
if (id_logdb > 0)
{
// Get the logconnection
logconnection = new DatabaseMeta(rep, id_logdb);
}
useBatchId = jobRow.getBoolean("USE_BATCH_ID", false); //$NON-NLS-1$
batchIdPassed = jobRow.getBoolean("PASS_BATCH_ID", false); //$NON-NLS-1$
logfieldUsed = jobRow.getBoolean("USE_LOGFIELD", false); //$NON-NLS-1$
if (monitor != null) monitor.worked(1);
//
// Load the common database connections
//
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingAvailableDatabasesFromRepository")); //$NON-NLS-1$
// Read objects from the shared XML file & the repository
try
{
sharedObjectsFile = jobRow.getString("SHARED_FILE", null);
readSharedObjects(rep);
}
catch(Exception e)
{
LogWriter.getInstance().logError(toString(), Messages.getString("JobMeta.ErrorReadingSharedObjects.Message", e.toString())); // $NON-NLS-1$ //$NON-NLS-1$
LogWriter.getInstance().logError(toString(), Const.getStackTracker(e));
}
if (monitor != null) monitor.worked(1);
log.logDetailed(toString(), "Loading " + noteids.length + " notes"); //$NON-NLS-1$ //$NON-NLS-2$
for (int i = 0; i < noteids.length; i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingNoteNr") + (i + 1) + "/" + noteids.length); //$NON-NLS-1$ //$NON-NLS-2$
NotePadMeta ni = new NotePadMeta(log, rep, noteids[i]);
if (indexOfNote(ni) < 0) addNote(ni);
if (monitor != null) monitor.worked(1);
}
// Load the job entries...
log.logDetailed(toString(), "Loading " + jecids.length + " job entries"); //$NON-NLS-1$ //$NON-NLS-2$
for (int i = 0; i < jecids.length; i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobEntryNr") + (i + 1) + "/" + (jecids.length)); //$NON-NLS-1$ //$NON-NLS-2$
JobEntryCopy jec = new JobEntryCopy(log, rep, getID(), jecids[i], jobentries, databases, slaveServers);
// Also set the copy number...
// We count the number of job entry copies that use the job entry
//
int copyNr = 0;
for (JobEntryCopy copy : jobcopies) {
if (jec.getEntry()==copy.getEntry()) {
copyNr++;
}
}
jec.setNr(copyNr);
int idx = indexOfJobEntry(jec);
if (idx < 0)
{
if (jec.getName() != null && jec.getName().length() > 0) addJobEntry(jec);
}
else
{
setJobEntry(idx, jec); // replace it!
}
if (monitor != null) monitor.worked(1);
}
// Load the hops...
log.logDetailed(toString(), "Loading " + hopid.length + " job hops"); //$NON-NLS-1$ //$NON-NLS-2$
for (int i = 0; i < hopid.length; i++)
{
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.ReadingJobHopNr") + (i + 1) + "/" + (jecids.length)); //$NON-NLS-1$ //$NON-NLS-2$
JobHopMeta hi = new JobHopMeta(rep, hopid[i], this, jobcopies);
jobhops.add(hi);
if (monitor != null) monitor.worked(1);
}
// Finally, clear the changed flags...
clearChanged();
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.FinishedLoadOfJob")); //$NON-NLS-1$
if (monitor != null) monitor.done();
}
else
{
throw new KettleException(Messages.getString("JobMeta.Exception.CanNotFindJob") + jobname); //$NON-NLS-1$
}
}
catch (KettleException dbe)
{
throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", jobname), dbe);
}
finally
{
setInternalKettleVariables();
}
}
public JobEntryCopy getJobEntryCopy(int x, int y, int iconsize)
{
int i, s;
s = nrJobEntries();
for (i = s - 1; i >= 0; i--) // Back to front because drawing goes from start to end
{
JobEntryCopy je = getJobEntry(i);
Point p = je.getLocation();
if (p != null)
{
if (x >= p.x && x <= p.x + iconsize && y >= p.y && y <= p.y + iconsize) { return je; }
}
}
return null;
}
public int nrJobEntries()
{
return jobcopies.size();
}
public int nrJobHops()
{
return jobhops.size();
}
public int nrNotes()
{
return notes.size();
}
public int nrDatabases()
{
return databases.size();
}
public JobHopMeta getJobHop(int i)
{
return jobhops.get(i);
}
public JobEntryCopy getJobEntry(int i)
{
return jobcopies.get(i);
}
public NotePadMeta getNote(int i)
{
return notes.get(i);
}
public DatabaseMeta getDatabase(int i)
{
return databases.get(i);
}
public void addJobEntry(JobEntryCopy je)
{
jobcopies.add(je);
setChanged();
}
public void addJobHop(JobHopMeta hi)
{
jobhops.add(hi);
setChanged();
}
public void addNote(NotePadMeta ni)
{
notes.add(ni);
setChanged();
}
public void addDatabase(DatabaseMeta ci)
{
databases.add(ci);
changed_databases = true;
}
public void addJobEntry(int p, JobEntryCopy si)
{
jobcopies.add(p, si);
changed_entries = true;
}
public void addJobHop(int p, JobHopMeta hi)
{
jobhops.add(p, hi);
changed_hops = true;
}
public void addNote(int p, NotePadMeta ni)
{
notes.add(p, ni);
changed_notes = true;
}
public void addDatabase(int p, DatabaseMeta ci)
{
databases.add(p, ci);
changed_databases = true;
}
/*
* (non-Javadoc)
*
* @see org.pentaho.di.trans.HasDatabaseInterface#addOrReplaceDatabase(org.pentaho.di.core.database.DatabaseMeta)
*/
public void addOrReplaceDatabase(DatabaseMeta databaseMeta)
{
int index = databases.indexOf(databaseMeta);
if (index < 0)
{
databases.add(databaseMeta);
}
else
{
DatabaseMeta previous = getDatabase(index);
previous.replaceMeta(databaseMeta);
}
changed_databases = true;
}
/**
* Add a new slave server to the job if it doesn't exist yet.
* Otherwise, replace it.
*
* @param slaveServer The slave server to be added.
*/
public void addOrReplaceSlaveServer(SlaveServer slaveServer)
{
int index = slaveServers.indexOf(slaveServer);
if (index<0)
{
slaveServers.add(slaveServer);
}
else
{
SlaveServer previous = slaveServers.get(index);
previous.replaceMeta(slaveServer);
}
setChanged();
}
public void removeJobEntry(int i)
{
jobcopies.remove(i);
setChanged();
}
public void removeJobHop(int i)
{
jobhops.remove(i);
setChanged();
}
public void removeNote(int i)
{
notes.remove(i);
setChanged();
}
public void raiseNote(int p)
{
// if valid index and not last index
if ((p >=0) && (p < notes.size()-1))
{
NotePadMeta note = notes.remove(p);
notes.add(note);
changed_notes = true;
}
}
public void lowerNote(int p)
{
// if valid index and not first index
if ((p >0) && (p < notes.size()))
{
NotePadMeta note = notes.remove(p);
notes.add(0, note);
changed_notes = true;
}
}
public void removeDatabase(int i)
{
if (i < 0 || i >= databases.size()) return;
databases.remove(i);
changed_databases = true;
}
public int indexOfJobHop(JobHopMeta he)
{
return jobhops.indexOf(he);
}
public int indexOfNote(NotePadMeta ni)
{
return notes.indexOf(ni);
}
public int indexOfJobEntry(JobEntryCopy ge)
{
return jobcopies.indexOf(ge);
}
public int indexOfDatabase(DatabaseMeta di)
{
return databases.indexOf(di);
}
public void setJobEntry(int idx, JobEntryCopy jec)
{
jobcopies.set(idx, jec);
}
/**
* Find an existing JobEntryCopy by its name and number
*
* @param name The name of the job entry copy
* @param nr The number of the job entry copy
* @return The JobEntryCopy or null if nothing was found!
*/
public JobEntryCopy findJobEntry(String name, int nr, boolean searchHiddenToo)
{
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jec = getJobEntry(i);
if (jec.getName().equalsIgnoreCase(name) && jec.getNr() == nr)
{
if (searchHiddenToo || jec.isDrawn()) { return jec; }
}
}
return null;
}
public JobEntryCopy findJobEntry(String full_name_nr)
{
int i;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jec = getJobEntry(i);
JobEntryInterface je = jec.getEntry();
if (je.toString().equalsIgnoreCase(full_name_nr)) { return jec; }
}
return null;
}
public JobHopMeta findJobHop(String name)
{
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.toString().equalsIgnoreCase(name)) { return hi; }
}
return null;
}
public JobHopMeta findJobHopFrom(JobEntryCopy jge)
{
if (jge != null) {
for (JobHopMeta hi:jobhops)
{
if (hi!=null && (hi.from_entry != null) && hi.from_entry.equals(jge)) // return the first
{
return hi;
}
}
}
return null;
}
public JobHopMeta findJobHop(JobEntryCopy from, JobEntryCopy to)
{
for (JobHopMeta hi:jobhops)
{
// Null-check the hop before calling isEnabled(); the original only tested for null after dereferencing it.
if (hi != null && hi.isEnabled())
{
if (hi.from_entry != null && hi.to_entry != null && hi.from_entry.equals(from) && hi.to_entry.equals(to)) { return hi; }
}
}
return null;
}
public JobHopMeta findJobHopTo(JobEntryCopy jge)
{
for (JobHopMeta hi:jobhops)
{
if (hi != null && hi.to_entry != null && hi.to_entry.equals(jge)) // Return the first!
{ return hi; }
}
return null;
}
public int findNrPrevJobEntries(JobEntryCopy from)
{
return findNrPrevJobEntries(from, false);
}
public JobEntryCopy findPrevJobEntry(JobEntryCopy to, int nr)
{
return findPrevJobEntry(to, nr, false);
}
public int findNrPrevJobEntries(JobEntryCopy to, boolean info)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && hi.to_entry.equals(to))
{
count++;
}
}
return count;
}
public JobEntryCopy findPrevJobEntry(JobEntryCopy to, int nr, boolean info)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && hi.to_entry.equals(to))
{
if (count == nr) { return hi.from_entry; }
count++;
}
}
return null;
}
public int findNrNextJobEntries(JobEntryCopy from)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && (hi.from_entry != null) && hi.from_entry.equals(from)) count++;
}
return count;
}
public JobEntryCopy findNextJobEntry(JobEntryCopy from, int cnt)
{
int count = 0;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.isEnabled() && (hi.from_entry != null) && hi.from_entry.equals(from))
{
if (count == cnt) { return hi.to_entry; }
count++;
}
}
return null;
}
public boolean hasLoop(JobEntryCopy entry)
{
return hasLoop(entry, null);
}
public boolean hasLoop(JobEntryCopy entry, JobEntryCopy lookup)
{
return false;
}
public boolean isEntryUsedInHops(JobEntryCopy jge)
{
JobHopMeta fr = findJobHopFrom(jge);
JobHopMeta to = findJobHopTo(jge);
if (fr != null || to != null) return true;
return false;
}
public int countEntries(String name)
{
int count = 0;
int i;
for (i = 0; i < nrJobEntries(); i++) // Look at all the hops;
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name)) count++;
}
return count;
}
public int generateJobEntryNameNr(String basename)
{
int nr = 1;
JobEntryCopy e = findJobEntry(basename + " " + nr, 0, true); //$NON-NLS-1$
while (e != null)
{
nr++;
e = findJobEntry(basename + " " + nr, 0, true); //$NON-NLS-1$
}
return nr;
}
public int findUnusedNr(String name)
{
int nr = 1;
JobEntryCopy je = findJobEntry(name, nr, true);
while (je != null)
{
nr++;
// log.logDebug("findUnusedNr()", "Trying unused nr: "+nr);
je = findJobEntry(name, nr, true);
}
return nr;
}
public int findMaxNr(String name)
{
int max = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name))
{
if (je.getNr() > max) max = je.getNr();
}
}
return max;
}
/**
* Proposes an alternative job entry name when the original already exists...
*
* @param entryname The job entry name to find an alternative for.
* @return The alternative job entry name.
*/
public String getAlternativeJobentryName(String entryname)
{
String newname = entryname;
JobEntryCopy jec = findJobEntry(newname);
int nr = 1;
while (jec != null)
{
nr++;
newname = entryname + " " + nr; //$NON-NLS-1$
jec = findJobEntry(newname);
}
return newname;
}
public JobEntryCopy[] getAllJobGraphEntries(String name)
{
int count = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name)) count++;
}
JobEntryCopy retval[] = new JobEntryCopy[count];
count = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.getName().equalsIgnoreCase(name))
{
retval[count] = je;
count++;
}
}
return retval;
}
public JobHopMeta[] getAllJobHopsUsing(String name)
{
List<JobHopMeta> hops = new ArrayList<JobHopMeta>();
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.from_entry != null && hi.to_entry != null)
{
if (hi.from_entry.getName().equalsIgnoreCase(name) || hi.to_entry.getName().equalsIgnoreCase(name))
{
hops.add(hi);
}
}
}
return hops.toArray(new JobHopMeta[hops.size()]);
}
public NotePadMeta getNote(int x, int y)
{
int i, s;
s = notes.size();
for (i = s - 1; i >= 0; i--) // Back to front because drawing goes from start to end
{
NotePadMeta ni = notes.get(i);
Point loc = ni.getLocation();
Point p = new Point(loc.x, loc.y);
if (x >= p.x && x <= p.x + ni.width + 2 * Const.NOTE_MARGIN && y >= p.y && y <= p.y + ni.height + 2 * Const.NOTE_MARGIN) { return ni; }
}
return null;
}
public void selectAll()
{
int i;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy ce = getJobEntry(i);
ce.setSelected(true);
}
}
public void unselectAll()
{
int i;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy ce = getJobEntry(i);
ce.setSelected(false);
}
}
public int getMaxUndo()
{
return max_undo;
}
public void setMaxUndo(int mu)
{
max_undo = mu;
while (undo.size() > mu && undo.size() > 0)
undo.remove(0);
}
public int getUndoSize()
{
if (undo == null) return 0;
return undo.size();
}
public void clearUndo()
{
undo = new ArrayList<TransAction>();
undo_position = -1;
}
public void addUndo(Object from[], Object to[], int pos[], Point prev[], Point curr[], int type_of_change, boolean nextAlso)
{
// First clean up after the current position.
// Example: if undo_position is 3, every entry at index 4 and above is removed,
// and the new action is then appended at index 4.
while (undo.size() > undo_position + 1 && undo.size() > 0)
{
int last = undo.size() - 1;
undo.remove(last);
}
TransAction ta = new TransAction();
switch (type_of_change)
{
case TYPE_UNDO_CHANGE:
ta.setChanged(from, to, pos);
break;
case TYPE_UNDO_DELETE:
ta.setDelete(from, pos);
break;
case TYPE_UNDO_NEW:
ta.setNew(from, pos);
break;
case TYPE_UNDO_POSITION:
ta.setPosition(from, pos, prev, curr);
break;
}
undo.add(ta);
undo_position++;
if (undo.size() > max_undo)
{
undo.remove(0);
undo_position--;
}
}
// get previous undo, change position
public TransAction previousUndo()
{
if (undo.isEmpty() || undo_position < 0) return null; // No undo left!
TransAction retval = undo.get(undo_position);
undo_position--;
return retval;
}
/**
* View current undo, don't change undo position
*
* @return The current undo transaction
*/
public TransAction viewThisUndo()
{
if (undo.isEmpty() || undo_position < 0) return null; // No undo left!
TransAction retval = undo.get(undo_position);
return retval;
}
// View previous undo, don't change position
public TransAction viewPreviousUndo()
{
if (undo.isEmpty() || undo_position < 0) return null; // No undo left!
TransAction retval = undo.get(undo_position);
return retval;
}
public TransAction nextUndo()
{
int size = undo.size();
if (size == 0 || undo_position >= size - 1) return null; // no redo left...
undo_position++;
TransAction retval = undo.get(undo_position);
return retval;
}
public TransAction viewNextUndo()
{
int size = undo.size();
if (size == 0 || undo_position >= size - 1) return null; // no redo left...
TransAction retval = undo.get(undo_position + 1);
return retval;
}
public Point getMaximum()
{
int maxx = 0, maxy = 0;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entry = getJobEntry(i);
Point loc = entry.getLocation();
if (loc.x > maxx) maxx = loc.x;
if (loc.y > maxy) maxy = loc.y;
}
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta ni = getNote(i);
Point loc = ni.getLocation();
if (loc.x + ni.width > maxx) maxx = loc.x + ni.width;
if (loc.y + ni.height > maxy) maxy = loc.y + ni.height;
}
return new Point(maxx + 100, maxy + 100);
}
public Point[] getSelectedLocations()
{
int sels = nrSelected();
Point retval[] = new Point[sels];
for (int i = 0; i < sels; i++)
{
JobEntryCopy si = getSelected(i);
Point p = si.getLocation();
retval[i] = new Point(p.x, p.y); // explicit copy of location
}
return retval;
}
public JobEntryCopy[] getSelectedEntries()
{
int sels = nrSelected();
if (sels == 0) return null;
JobEntryCopy retval[] = new JobEntryCopy[sels];
for (int i = 0; i < sels; i++)
{
JobEntryCopy je = getSelected(i);
retval[i] = je;
}
return retval;
}
public int nrSelected()
{
int i, count;
count = 0;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.isSelected() && je.isDrawn()) count++;
}
return count;
}
public JobEntryCopy getSelected(int nr)
{
int i, count;
count = 0;
for (i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy je = getJobEntry(i);
if (je.isSelected())
{
if (nr == count) return je;
count++;
}
}
return null;
}
public int[] getEntryIndexes(JobEntryCopy entries[])
{
int retval[] = new int[entries.length];
for (int i = 0; i < entries.length; i++)
retval[i] = indexOfJobEntry(entries[i]);
return retval;
}
public JobEntryCopy findStart()
{
for (int i = 0; i < nrJobEntries(); i++)
{
if (getJobEntry(i).isStart()) return getJobEntry(i);
}
return null;
}
public String toString()
{
if (name != null) return name;
if (filename != null)
return filename;
else
return getClass().getName();
}
/**
* @return Returns the logfieldUsed.
*/
public boolean isLogfieldUsed()
{
return logfieldUsed;
}
/**
* @param logfieldUsed The logfieldUsed to set.
*/
public void setLogfieldUsed(boolean logfieldUsed)
{
this.logfieldUsed = logfieldUsed;
}
/**
* @return Returns the useBatchId.
*/
public boolean isBatchIdUsed()
{
return useBatchId;
}
/**
* @param useBatchId The useBatchId to set.
*/
public void setUseBatchId(boolean useBatchId)
{
this.useBatchId = useBatchId;
}
/**
* @return Returns the batchIdPassed.
*/
public boolean isBatchIdPassed()
{
return batchIdPassed;
}
/**
* @param batchIdPassed The batchIdPassed to set.
*/
public void setBatchIdPassed(boolean batchIdPassed)
{
this.batchIdPassed = batchIdPassed;
}
/**
* Builds a list of all the SQL statements that this job needs in order to work properly.
*
* @return An ArrayList of SQLStatement objects.
*/
public List<SQLStatement> getSQLStatements(Repository repository, IProgressMonitor monitor) throws KettleException
{
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.GettingSQLNeededForThisJob"), nrJobEntries() + 1); //$NON-NLS-1$
List<SQLStatement> stats = new ArrayList<SQLStatement>();
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy copy = getJobEntry(i);
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLForJobEntryCopy") + copy + "]"); //$NON-NLS-1$ //$NON-NLS-2$
List<SQLStatement> list = copy.getEntry().getSQLStatements(repository, this);
stats.addAll(list);
if (monitor != null) monitor.worked(1);
}
// Also check the sql for the logtable...
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.GettingSQLStatementsForJobLogTables")); //$NON-NLS-1$
if (logconnection != null && logTable != null && logTable.length() > 0)
{
Database db = new Database(logconnection);
try
{
db.connect();
RowMetaInterface fields = Database.getJobLogrecordFields(false, useBatchId, logfieldUsed);
String sql = db.getDDL(logTable, fields);
if (sql != null && sql.length() > 0)
{
SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logconnection, sql); //$NON-NLS-1$
stats.add(stat);
}
}
catch (KettleDatabaseException dbe)
{
SQLStatement stat = new SQLStatement(Messages.getString("JobMeta.SQLFeedback.ThisJob"), logconnection, null); //$NON-NLS-1$
stat.setError(Messages.getString("JobMeta.SQLFeedback.ErrorObtainingJobLogTableInfo") + dbe.getMessage()); //$NON-NLS-1$
stats.add(stat);
}
finally
{
db.disconnect();
}
}
if (monitor != null) monitor.worked(1);
if (monitor != null) monitor.done();
return stats;
}
/**
* @return Returns the logTable.
*/
public String getLogTable()
{
return logTable;
}
/**
* @param logTable The logTable to set.
*/
public void setLogTable(String logTable)
{
this.logTable = logTable;
}
/**
* @return Returns the arguments.
*/
public String[] getArguments()
{
return arguments;
}
/**
* @param arguments The arguments to set.
*/
public void setArguments(String[] arguments)
{
this.arguments = arguments;
}
/**
* Get a list of all the strings used in this job.
*
* @return A list of StringSearchResult with strings used in the job
*/
public List<StringSearchResult> getStringList(boolean searchSteps, boolean searchDatabases, boolean searchNotes)
{
List<StringSearchResult> stringList = new ArrayList<StringSearchResult>();
if (searchSteps)
{
// Loop over all steps in the transformation and see what the used vars are...
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entryMeta = getJobEntry(i);
stringList.add(new StringSearchResult(entryMeta.getName(), entryMeta, this, Messages.getString("JobMeta.SearchMetadata.JobEntryName"))); //$NON-NLS-1$
if (entryMeta.getDescription() != null)
stringList.add(new StringSearchResult(entryMeta.getDescription(), entryMeta, this, Messages.getString("JobMeta.SearchMetadata.JobEntryDescription"))); //$NON-NLS-1$
JobEntryInterface metaInterface = entryMeta.getEntry();
StringSearcher.findMetaData(metaInterface, 1, stringList, entryMeta, this);
}
}
// Loop over all steps in the transformation and see what the used vars are...
if (searchDatabases)
{
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta meta = getDatabase(i);
stringList.add(new StringSearchResult(meta.getName(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseConnectionName"))); //$NON-NLS-1$
if (meta.getDatabaseName() != null) stringList.add(new StringSearchResult(meta.getDatabaseName(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseName"))); //$NON-NLS-1$
if (meta.getUsername() != null) stringList.add(new StringSearchResult(meta.getUsername(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseUsername"))); //$NON-NLS-1$
if (meta.getDatabaseTypeDesc() != null)
stringList.add(new StringSearchResult(meta.getDatabaseTypeDesc(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabaseTypeDescription"))); //$NON-NLS-1$
if (meta.getDatabasePortNumberString() != null)
stringList.add(new StringSearchResult(meta.getDatabasePortNumberString(), meta, this, Messages.getString("JobMeta.SearchMetadata.DatabasePort"))); //$NON-NLS-1$
}
}
// Loop over all steps in the transformation and see what the used vars are...
if (searchNotes)
{
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta meta = getNote(i);
if (meta.getNote() != null) stringList.add(new StringSearchResult(meta.getNote(), meta, this, Messages.getString("JobMeta.SearchMetadata.NotepadText"))); //$NON-NLS-1$
}
}
return stringList;
}
public List<String> getUsedVariables()
{
// Get the list of Strings.
List<StringSearchResult> stringList = getStringList(true, true, false);
List<String> varList = new ArrayList<String>();
// Look around in the strings, see what we find...
for (StringSearchResult result : stringList)
{
StringUtil.getUsedVariables(result.getString(), varList, false);
}
return varList;
}
/**
* Get an array of all the selected job entries
*
* @return A list containing all the selected & drawn job entries.
*/
public List<GUIPositionInterface> getSelectedDrawnJobEntryList()
{
List<GUIPositionInterface> list = new ArrayList<GUIPositionInterface>();
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy jobEntryCopy = getJobEntry(i);
if (jobEntryCopy.isDrawn() && jobEntryCopy.isSelected())
{
list.add( jobEntryCopy);
}
}
return list;
}
public boolean haveConnectionsChanged()
{
if (changed_databases) return true;
for (int i = 0; i < nrDatabases(); i++)
{
DatabaseMeta ci = getDatabase(i);
if (ci.hasChanged()) return true;
}
return false;
}
public boolean haveJobEntriesChanged()
{
if (changed_entries) return true;
for (int i = 0; i < nrJobEntries(); i++)
{
JobEntryCopy entry = getJobEntry(i);
if (entry.hasChanged()) return true;
}
return false;
}
public boolean haveJobHopsChanged()
{
if (changed_hops) return true;
for (JobHopMeta hi:jobhops) // Look at all the hops
{
if (hi.hasChanged()) return true;
}
return false;
}
public boolean haveNotesChanged()
{
if (changed_notes) return true;
for (int i = 0; i < nrNotes(); i++)
{
NotePadMeta note = getNote(i);
if (note.hasChanged()) return true;
}
return false;
}
/**
* @return the sharedObjectsFile
*/
public String getSharedObjectsFile()
{
return sharedObjectsFile;
}
/**
* @param sharedObjectsFile the sharedObjectsFile to set
*/
public void setSharedObjectsFile(String sharedObjectsFile)
{
this.sharedObjectsFile = sharedObjectsFile;
}
/**
* @param modifiedUser The modifiedUser to set.
*/
public void setModifiedUser(String modified_User)
{
modifiedUser = modified_User;
}
/**
* @return Returns the modifiedUser.
*/
public String getModifiedUser()
{
return modifiedUser;
}
/**
* @param modifiedDate The modifiedDate to set.
*/
public void setModifiedDate(Date modified_Date)
{
modifiedDate = modified_Date;
}
/**
* @return Returns the modifiedDate.
*/
public Date getModifiedDate()
{
return modifiedDate;
}
/**
* @return The description of the job
*/
public String getDescription()
{
return description;
}
/**
* @return The extended description of the job
*/
public String getExtendedDescription()
{
return extended_description;
}
/**
* @return The version of the job
*/
public String getJobversion()
{
return job_version;
}
/**
* Get the status of the job
*/
public int getJobstatus()
{
return job_status;
}
/**
* Set the description of the job.
*
* @param n The new description of the job
*/
public void setDescription(String n)
{
description = n;
}
/**
* Set the description of the job.
*
* @param n The new extended description of the job
*/
public void setExtendedDescription(String n)
{
extended_description = n;
}
/**
* Set the version of the job.
*
* @param n The new version description of the job
*/
public void setJobversion(String n)
{
job_version = n;
}
/**
* Set the status of the job.
*
* @param n The new status description of the job
*/
public void setJobstatus(int n)
{
job_status = n;
}
/**
* @return Returns the createdDate.
*/
public Date getCreatedDate()
{
return created_date;
}
/**
* @param createdDate The createdDate to set.
*/
public void setCreatedDate(Date createddate)
{
created_date = createddate;
}
/**
* @param createdUser The createdUser to set.
*/
public void setCreatedUser(String createduser)
{
created_user = createduser;
}
/**
* @return Returns the createdUser.
*/
public String getCreatedUser()
{
return created_user;
}
/**
* Find a jobentry with a certain ID in a list of job entries.
* @param jobentries The List of jobentries
* @param id_jobentry The id of the jobentry
* @return The JobEntry object if one was found, null otherwise.
*/
public static final JobEntryInterface findJobEntry(List<JobEntryInterface> jobentries, long id_jobentry)
{
if (jobentries == null)
return null;
for (JobEntryInterface je : jobentries)
{
if (je.getID() == id_jobentry) {
return je;
}
}
return null;
}
/**
* Find a jobentrycopy with a certain ID in a list of job entry copies.
* @param jobcopies The List of jobentry copies
* @param id_jobentry_copy The id of the jobentry copy
* @return The JobEntryCopy object if one was found, null otherwise.
*/
public static final JobEntryCopy findJobEntryCopy(List<JobEntryCopy> jobcopies, long id_jobentry_copy)
{
if (jobcopies == null)
return null;
for (JobEntryCopy jec : jobcopies)
{
if (jec.getID() == id_jobentry_copy) {
return jec;
}
}
return null;
}
/**
* Calls setInternalKettleVariables on the default object.
*/
public void setInternalKettleVariables()
{
setInternalKettleVariables(variables);
}
/**
* This method sets various internal kettle variables that can be used by the transformation.
*/
public void setInternalKettleVariables(VariableSpace var)
{
if (filename!=null) // we have a filename that's defined.
{
try
{
FileObject fileObject = KettleVFS.getFileObject(filename);
FileName fileName = fileObject.getName();
// The filename of the transformation
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName());
// The directory of the transformation
FileName fileDir = fileName.getParent();
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI());
}
catch(IOException e)
{
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "");
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, "");
}
}
else
{
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); //$NON-NLS-1$
var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); //$NON-NLS-1$
}
// The name of the job
var.setVariable(Const.INTERNAL_VARIABLE_JOB_NAME, Const.NVL(name, "")); //$NON-NLS-1$
// The name of the directory in the repository
var.setVariable(Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, directory != null ? directory.getPath() : ""); //$NON-NLS-1$
// Undefine the transformation specific variables:
// transformations can't run jobs, so if you use these they are 99.99% wrong.
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME, null);
var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, null);
}
public void copyVariablesFrom(VariableSpace space) {
variables.copyVariablesFrom(space);
}
public String environmentSubstitute(String aString)
{
return variables.environmentSubstitute(aString);
}
public String[] environmentSubstitute(String aString[])
{
return variables.environmentSubstitute(aString);
}
public VariableSpace getParentVariableSpace()
{
return variables.getParentVariableSpace();
}
public void setParentVariableSpace(VariableSpace parent)
{
variables.setParentVariableSpace(parent);
}
public String getVariable(String variableName, String defaultValue)
{
return variables.getVariable(variableName, defaultValue);
}
public String getVariable(String variableName)
{
return variables.getVariable(variableName);
}
public boolean getBooleanValueOfVariable(String variableName, boolean defaultValue) {
if (!Const.isEmpty(variableName))
{
String value = environmentSubstitute(variableName);
if (!Const.isEmpty(value))
{
return ValueMeta.convertStringToBoolean(value);
}
}
return defaultValue;
}
public void initializeVariablesFrom(VariableSpace parent)
{
variables.initializeVariablesFrom(parent);
}
public String[] listVariables()
{
return variables.listVariables();
}
public void setVariable(String variableName, String variableValue)
{
variables.setVariable(variableName, variableValue);
}
public void shareVariablesWith(VariableSpace space)
{
variables = space;
}
public void injectVariables(Map<String,String> prop)
{
variables.injectVariables(prop);
}
/**
* Check all job entries within the job. Each Job Entry has the opportunity to
* check its own settings.
* @param remarks List of CheckResult remarks inserted into by each JobEntry
* @param only_selected true if you only want to check the selected jobs
* @param monitor Progress monitor (not presently in use)
*/
public void checkJobEntries(List<CheckResultInterface> remarks, boolean only_selected, IProgressMonitor monitor) {
remarks.clear(); // Empty remarks
if (monitor != null) monitor.beginTask(Messages.getString("JobMeta.Monitor.VerifyingThisJobEntryTask.Title"), jobcopies.size() + 2); //$NON-NLS-1$
boolean stop_checking = false;
for (int i=0; i<jobcopies.size() && !stop_checking; i++) {
JobEntryCopy copy = jobcopies.get(i); // get the job entry copy
if ( (!only_selected) || (only_selected && copy.isSelected()) ) {
JobEntryInterface entry = copy.getEntry();
if (entry != null) {
if (monitor != null) monitor.subTask(Messages.getString("JobMeta.Monitor.VerifyingJobEntry.Title",entry.getName())); //$NON-NLS-1$ //$NON-NLS-2$
entry.check(remarks, this);
if (monitor != null) {
monitor.worked(1); // progress bar...
if (monitor.isCanceled()) {
stop_checking = true;
}
}
}
}
if (monitor != null) {
monitor.worked(1);
}
}
if (monitor != null) {
monitor.done();
}
}
public List<ResourceReference> getResourceDependencies() {
List<ResourceReference> resourceReferences = new ArrayList<ResourceReference>();
JobEntryCopy copy = null;
JobEntryInterface entry = null;
for (int i=0;i<jobcopies.size();i++) {
copy = jobcopies.get(i); // get the job entry copy
entry = copy.getEntry();
resourceReferences.addAll( entry.getResourceDependencies(this) );
}
return resourceReferences;
}
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface) throws KettleException {
try {
FileObject fileObject = KettleVFS.getFileObject(getFilename());
String name = namingInterface.nameResource(fileObject.getName().getBaseName(), fileObject.getParent().getName().getPath(), "kjb");
ResourceDefinition definition = definitions.get(name);
if (definition==null) {
// If we do this once, it will be plenty :-)
//
JobMeta jobMeta = (JobMeta) this.realClone(false);
// Add used resources, modify transMeta accordingly
// Go through the list of steps, etc.
// These critters change the steps in the cloned TransMeta
// At the end we make a new XML version of it in "exported" format...
// loop over steps, databases will be exported to XML anyway.
//
for (JobEntryCopy jobEntry: jobMeta.jobcopies) {
jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface);
}
// At the end, add ourselves to the map...
//
String transMetaContent = jobMeta.getXML();
definition = new ResourceDefinition(name, transMetaContent);
definitions.put(fileObject.getName().getPath(), definition);
}
} catch (FileSystemException e) {
throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
} catch (IOException e) {
throw new KettleException(Messages.getString("JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
}
return filename;
}
/**
* @return the slaveServer list
*/
public List<SlaveServer> getSlaveServers() {
return slaveServers;
}
/**
* @param slaveServers the slaveServers to set
*/
public void setSlaveServers(List<SlaveServer> slaveServers) {
this.slaveServers = slaveServers;
}
/**
* Find a slave server using the name
* @param serverString the name of the slave server
* @return the slave server or null if we couldn't spot an appropriate entry.
*/
public SlaveServer findSlaveServer(String serverString)
{
return SlaveServer.findSlaveServer(slaveServers, serverString);
}
/**
* @return An array list slave server names
*/
public String[] getSlaveServerNames()
{
return SlaveServer.getSlaveServerNames(slaveServers);
}
}
| SEMINOLE-128
Re-fixed by implementing a rudimentary Observer/Observable pattern using Spoon (the observer) and JobMeta and TransMeta (the observables.)
There is room for some refactoring later, including making a BaseMeta class from which JobMeta and TransMeta could inherit, and also creating the concept of an observable PDIObject that could be used to track these types of UI notifications from the core package.
git-svn-id: 51b39fcfd0d3a6ea7caa15377cad4af13b9d2664@5496 5fb7f6ec-07c1-534a-b4ca-9155e429e800
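The SEMINOLE-128 message above describes letting Spoon observe JobMeta/TransMeta through a shared ChangedFlag base class. Below is a minimal, self-contained sketch of that kind of Observer/Observable wiring. It is an assumption-laden illustration only: the ChangedFlag here is built on java.util.Observable (deprecated in modern JDKs, but contemporary with this code), and DemoJobMeta/ObserverSketch are made-up names, not the actual Kettle or Spoon classes, whose real implementations may differ.

import java.util.Observable;
import java.util.Observer;

// Hypothetical stand-in for the ChangedFlag base class named in the accompanying diff.
// It simply widens Observable's protected change-tracking methods so that meta classes
// can flag their own modifications before notifying observers.
class ChangedFlag extends Observable {
    @Override
    public synchronized void setChanged() {
        super.setChanged();
    }

    @Override
    public synchronized void clearChanged() {
        super.clearChanged();
    }
}

// JobMeta-like observable: a mutating method flags the change and notifies observers.
class DemoJobMeta extends ChangedFlag {
    public void selectAll() {
        // ... mutate the model here ...
        setChanged();
        notifyObservers("refreshGraph"); // same notification string used in the diff
    }
}

public class ObserverSketch {
    public static void main(String[] args) {
        DemoJobMeta jobMeta = new DemoJobMeta();
        // Spoon plays the Observer role: redraw the graph when the model says so.
        Observer spoonLike = (observable, arg) -> {
            if ("refreshGraph".equals(arg)) {
                System.out.println("refreshing graph");
            }
        };
        jobMeta.addObserver(spoonLike);
        jobMeta.selectAll(); // prints "refreshing graph"
    }
}

Running ObserverSketch prints "refreshing graph", mirroring the notifyObservers("refreshGraph") call that the diff adds to JobMeta's mutating methods.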
| src/org/pentaho/di/job/JobMeta.java | SEMINOLE-128 Re-fixed by implementing a rudimentary Observer/Observable pattern using Spoon (the observer) and JobMeta and TransMeta (the observables.) | <ide><path>rc/org/pentaho/di/job/JobMeta.java
<ide> import org.pentaho.di.core.Props;
<ide> import org.pentaho.di.core.RowMetaAndData;
<ide> import org.pentaho.di.core.SQLStatement;
<del>import org.pentaho.di.core.changed.ChangedFlagInterface;
<add>import org.pentaho.di.core.changed.ChangedFlag;
<ide> import org.pentaho.di.core.database.Database;
<ide> import org.pentaho.di.core.database.DatabaseMeta;
<ide> import org.pentaho.di.core.exception.KettleDatabaseException;
<ide> * @since 11-08-2003
<ide> *
<ide> */
<del>public class JobMeta implements Cloneable, Comparable<JobMeta>, XMLInterface, UndoInterface,
<del> HasDatabasesInterface, ChangedFlagInterface,
<add>public class JobMeta extends ChangedFlag implements Cloneable, Comparable<JobMeta>, XMLInterface, UndoInterface,
<add> HasDatabasesInterface,
<ide> VariableSpace, EngineMetaInterface,
<ide> ResourceExportInterface, HasSlaveServersInterface
<ide> {
<ide>
<ide> protected String arguments[];
<ide>
<del> protected boolean changed, changed_entries, changed_hops, changed_notes, changed_databases;
<add> protected boolean changed_entries, changed_hops, changed_notes, changed_databases;
<ide>
<ide> protected DatabaseMeta logconnection;
<ide>
<ide> this.databases = databases;
<ide> }
<ide>
<del> public void setChanged()
<del> {
<del> setChanged(true);
<del> }
<del>
<ide> public void setChanged(boolean ch)
<ide> {
<del> changed = ch;
<add> if (ch)
<add> setChanged();
<add> else
<add> clearChanged();
<ide> }
<ide>
<ide> public void clearChanged()
<ide> NotePadMeta note = getNote(i);
<ide> note.setChanged(false);
<ide> }
<del> changed = false;
<add> super.clearChanged();
<ide> }
<ide>
<ide> public boolean hasChanged()
<ide> {
<del> if (changed) return true;
<add> if (super.hasChanged()) return true;
<ide>
<ide> if (haveJobEntriesChanged()) return true;
<ide> if (haveJobHopsChanged()) return true;
<ide> JobEntryCopy ce = getJobEntry(i);
<ide> ce.setSelected(true);
<ide> }
<add>
<add> setChanged();
<add> notifyObservers("refreshGraph");
<ide> }
<ide>
<ide> public void unselectAll() |
|
JavaScript | mit | 78ef90618420429fc8e21165a8ab25b17ef8bfbd | 0 | Chrisui/react-hotkeys,JohnC-80/react-hotkeys,danauclair/react-hotkeys,danauclair/react-hotkeys,Chrisui/react-hotkeys,JohnC-80/react-hotkeys | import React from 'react';
import FocusTrap from './FocusTrap';
import HotKeyMapMixin from './HotKeyMapMixin';
import isArray from 'lodash/lang/isArray';
import isObject from 'lodash/lang/isObject';
import forEach from 'lodash/collection/forEach';
import isEqual from 'lodash/lang/isEqual';
function getSequencesFromMap(hotKeyMap, hotKeyName) {
const sequences = hotKeyMap[hotKeyName];
// If no sequence is found with this name we assume
// the user is passing a hard-coded sequence as a key
if (!sequences) {
return [hotKeyName];
}
if (isArray(sequences)) {
return sequences;
}
return [sequences];
}
const HotKeys = React.createClass({
mixins: [HotKeyMapMixin()],
propTypes: {
onFocus: React.PropTypes.func,
onBlur: React.PropTypes.func,
focusName: React.PropTypes.string, // Currently unused
keyMap: React.PropTypes.object,
handlers: React.PropTypes.object
},
contextTypes: {
hotKeyParent: React.PropTypes.any
},
childContextTypes: {
hotKeyParent: React.PropTypes.any
},
getChildContext() {
return {
hotKeyParent: this
};
},
componentDidMount() {
// import is here to support React's server rendering as Mousetrap immediately
// calls itself with window and it fails in Node environment
const Mousetrap = require('mousetrap');
// Not optimal - imagine hundreds of this component. We need a top level
// delegation point for mousetrap
this.__mousetrap__ = new Mousetrap(
React.findDOMNode(this.refs.focusTrap)
);
this.updateHotKeys(true);
},
componentDidUpdate(prevProps) {
this.updateHotKeys(false, prevProps);
},
componentWillUnmount() {
if (this.context.hotKeyParent) {
this.context.hotKeyParent.childHandledSequence(null);
}
this.__mousetrap__.reset();
},
updateHotKeys(force = false, prevProps = {}) {
const {handlers = {}} = this.props;
const {handlers: prevHandlers = handlers} = prevProps;
// Ensure map is up-to-date to begin with
// We will only bother continuing if the map was actually updated
if (!force && isEqual(handlers, prevHandlers) && !this.updateMap()) {
return;
}
const hotKeyMap = this.getMap();
const sequenceHandlers = [];
const mousetrap = this.__mousetrap__;
// Group all our handlers by sequence
forEach(handlers, (handler, hotKey) => {
const handlerSequences = getSequencesFromMap(hotKeyMap, hotKey);
// Could be optimized as every handler will get called across every bound
// component - imagine making a node a focus point and then having hundreds!
forEach(handlerSequences, (sequence) => {
let action;
const callback = (event, sequence) => {
// Check we are actually in focus and that a child hasn't already handled this sequence
if (this.__isFocused__ && sequence !== this.__lastChildSequence__) {
if (this.context.hotKeyParent) {
this.context.hotKeyParent.childHandledSequence(sequence);
}
return handler(event, sequence);
}
};
if (isObject(sequence)) {
action = sequence.action;
sequence = sequence.sequence;
}
sequenceHandlers.push({callback, action, sequence});
});
});
// Hard reset our handlers (probably could be more efficient)
mousetrap.reset();
forEach(sequenceHandlers, (handler) =>
mousetrap.bind(handler.sequence, handler.callback, handler.action));
},
childHandledSequence(sequence = null) {
this.__lastChildSequence__ = sequence;
// Traverse up any hot key parents so everyone is aware a child has handled a certain sequence
if (this.context.hotKeyParent) {
this.context.hotKeyParent.childHandledSequence(sequence);
}
},
onFocus() {
this.__isFocused__ = true;
if (this.props.onFocus) {
this.props.onFocus(...arguments);
}
},
onBlur() {
this.__isFocused__ = false;
if (this.props.onBlur) {
this.props.onBlur(...arguments);
}
},
render() {
return (
<FocusTrap ref="focusTrap" {...this.props} onFocus={this.onFocus} onBlur={this.onBlur}>
{this.props.children}
</FocusTrap>
)
}
});
export default HotKeys;
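As a usage illustration for the component above, here is a hedged sketch of how a consumer might mount HotKeys with a keyMap and matching handlers. The key names, key sequences, import path, and the Workspace component are invented for the example and are not taken from the project's documentation.

import React from 'react';
import HotKeys from './HotKeys'; // path is assumed; adjust to wherever HotKeys is exported

// Illustrative key map: names are arbitrary labels that resolve to Mousetrap sequences.
const keyMap = {
  deleteNode: ['del', 'backspace'],
  save: 'command+s'
};

// Handlers are looked up by the same names (a hard-coded sequence would also work).
const handlers = {
  deleteNode: (event, sequence) => console.log('delete requested via', sequence),
  save: (event) => event.preventDefault()
};

const Workspace = React.createClass({
  render() {
    return (
      <HotKeys keyMap={keyMap} handlers={handlers}>
        {/* focusable child so the surrounding FocusTrap reports focus */}
        <div tabIndex="0">Focus me, then press one of the bound keys.</div>
      </HotKeys>
    );
  }
});

export default Workspace;

Because HotKeys tracks focus through FocusTrap's onFocus/onBlur callbacks, the bound handlers only fire while the wrapped area is focused, as the __isFocused__ check in the component above shows.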
| lib/HotKeys.js | import React from 'react';
import FocusTrap from './FocusTrap';
import HotKeyMapMixin from './HotKeyMapMixin';
import isArray from 'lodash/lang/isArray';
import isObject from 'lodash/lang/isObject';
import forEach from 'lodash/collection/forEach';
function getSequencesFromMap(hotKeyMap, hotKeyName) {
const sequences = hotKeyMap[hotKeyName];
// If no sequence is found with this name we assume
// the user is passing a hard-coded sequence as a key
if (!sequences) {
return [hotKeyName];
}
if (isArray(sequences)) {
return sequences;
}
return [sequences];
}
const HotKeys = React.createClass({
mixins: [HotKeyMapMixin()],
propTypes: {
onFocus: React.PropTypes.func,
onBlur: React.PropTypes.func,
focusName: React.PropTypes.string, // Currently unused
keyMap: React.PropTypes.object,
handlers: React.PropTypes.object
},
contextTypes: {
hotKeyParent: React.PropTypes.any
},
childContextTypes: {
hotKeyParent: React.PropTypes.any
},
getChildContext() {
return {
hotKeyParent: this
};
},
componentDidMount() {
// import is here to support React's server rendering as Mousetrap immediately
// calls itself with window and it fails in Node environment
const Mousetrap = require('mousetrap');
// Not optimal - imagine hundreds of this component. We need a top level
// delegation point for mousetrap
this.__mousetrap__ = new Mousetrap(
React.findDOMNode(this.refs.focusTrap)
);
this.updateHotKeys(true);
},
componentDidUpdate() {
this.updateHotKeys();
},
componentWillUnmount() {
if (this.context.hotKeyParent) {
this.context.hotKeyParent.childHandledSequence(null);
}
this.__mousetrap__.reset();
},
updateHotKeys(force = false) {
// Ensure map is up-to-date to begin with
// We will only bother continuing if the map was actually updated
if (!this.updateMap() && !force) {
return;
}
const {handlers = {}} = this.props;
const hotKeyMap = this.getMap();
const sequenceHandlers = [];
const mousetrap = this.__mousetrap__;
// Group all our handlers by sequence
forEach(handlers, (handler, hotKey) => {
const handlerSequences = getSequencesFromMap(hotKeyMap, hotKey);
// Could be optimized as every handler will get called across every bound
// component - imagine making a node a focus point and then having hundreds!
forEach(handlerSequences, (sequence) => {
let action;
const callback = (event, sequence) => {
// Check we are actually in focus and that a child hasn't already handled this sequence
if (this.__isFocused__ && sequence !== this.__lastChildSequence__) {
if (this.context.hotKeyParent) {
this.context.hotKeyParent.childHandledSequence(sequence);
}
return handler(event, sequence);
}
};
if (isObject(sequence)) {
action = sequence.action;
sequence = sequence.sequence;
}
sequenceHandlers.push({callback, action, sequence});
});
});
// Hard reset our handlers (probably could be more efficient)
mousetrap.reset();
forEach(sequenceHandlers, (handler) =>
mousetrap.bind(handler.sequence, handler.callback, handler.action));
},
childHandledSequence(sequence = null) {
this.__lastChildSequence__ = sequence;
// Traverse up any hot key parents so everyone is aware a child has handled a certain sequence
if (this.context.hotKeyParent) {
this.context.hotKeyParent.childHandledSequence(sequence);
}
},
onFocus() {
this.__isFocused__ = true;
if (this.props.onFocus) {
this.props.onFocus(...arguments);
}
},
onBlur() {
this.__isFocused__ = false;
if (this.props.onBlur) {
this.props.onBlur(...arguments);
}
},
render() {
return (
<FocusTrap ref="focusTrap" {...this.props} onFocus={this.onFocus} onBlur={this.onBlur}>
{this.props.children}
</FocusTrap>
)
}
});
export default HotKeys;
| Fix hotkey bindings not being updated when handlers map changes but key map does not
| lib/HotKeys.js | Fix hotkey bindings not being updated when handlers map changes but key map does not | <ide><path>ib/HotKeys.js
<ide> import isArray from 'lodash/lang/isArray';
<ide> import isObject from 'lodash/lang/isObject';
<ide> import forEach from 'lodash/collection/forEach';
<add>import isEqual from 'lodash/lang/isEqual';
<ide>
<ide> function getSequencesFromMap(hotKeyMap, hotKeyName) {
<ide> const sequences = hotKeyMap[hotKeyName];
<ide> this.updateHotKeys(true);
<ide> },
<ide>
<del> componentDidUpdate() {
<del> this.updateHotKeys();
<add> componentDidUpdate(prevProps) {
<add> this.updateHotKeys(false, prevProps);
<ide> },
<ide>
<ide> componentWillUnmount() {
<ide> this.__mousetrap__.reset();
<ide> },
<ide>
<del> updateHotKeys(force = false) {
<add> updateHotKeys(force = false, prevProps = {}) {
<add> const {handlers = {}} = this.props;
<add> const {handlers: prevHandlers = handlers} = prevProps;
<add>
<ide> // Ensure map is up-to-date to begin with
<ide> // We will only bother continuing if the map was actually updated
<del> if (!this.updateMap() && !force) {
<add> if (!force && isEqual(handlers, prevHandlers) && !this.updateMap()) {
<ide> return;
<ide> }
<ide>
<del> const {handlers = {}} = this.props;
<ide> const hotKeyMap = this.getMap();
<ide> const sequenceHandlers = [];
<ide> const mousetrap = this.__mousetrap__; |
|
JavaScript | apache-2.0 | 45da991659c3acb964a14692b2377630cbbb6fa4 | 0 | wavesplatform/UpcoinWallet,wavesplatform/UpcoinWallet | (function () {
'use strict';
var DEFAULT_FEE_AMOUNT = '0.001';
function WavesWalletListController($scope, $interval, events, applicationContext,
apiService, transactionLoadingService) {
var walletList = this;
var refreshPromise;
var refreshDelay = 10 * 1000;
function unimplementedFeature() {
$scope.home.featureUnderDevelopment();
}
function findWalletByCurrency(currency) {
return _.find(walletList.wallets, function (w) {
return w.balance.currency === currency;
});
}
walletList.wallets = [
{
balance: new Money(0, Currency.UPC)
},
{
balance: new Money(0, Currency.BTC)
},
{
balance: new Money(0, Currency.WAV),
hidden: true
}
];
walletList.transactions = [];
walletList.send = send;
walletList.withdraw = withdraw;
walletList.trade = trade;
loadDataFromBackend();
patchCurrencyIdsForTestnet();
$scope.$on('$destroy', function () {
if (angular.isDefined(refreshPromise)) {
$interval.cancel(refreshPromise);
refreshPromise = undefined;
}
});
function send (currency) {
var assetWallet = findWalletByCurrency(currency);
var wavesWallet = findWalletByCurrency(Currency.WAV);
$scope.$broadcast(events.WALLET_SEND, {
assetBalance: assetWallet.balance,
wavesBalance: wavesWallet.balance
});
}
function withdraw (currency) {
unimplementedFeature();
}
function trade (currency) {
unimplementedFeature();
}
function loadDataFromBackend() {
refreshWallets();
refreshTransactions();
refreshPromise = $interval(function() {
refreshWallets();
refreshTransactions();
}, refreshDelay);
}
function refreshWallets() {
apiService.address.balance(applicationContext.account.address)
.then(function (response) {
var wavesWallet = findWalletByCurrency(Currency.WAV);
wavesWallet.balance = Money.fromCoins(response.balance, Currency.WAV);
});
apiService.assets.balance(applicationContext.account.address).then(function (response) {
_.forEach(response.balances, function (assetBalance) {
var id = assetBalance.assetId;
// adding asset details to cache
applicationContext.cache.assets.put(assetBalance.issueTransaction);
applicationContext.cache.assets.update(id, assetBalance.balance,
assetBalance.reissuable, assetBalance.quantity);
});
_.forEach(walletList.wallets, function (wallet) {
var asset = applicationContext.cache.assets[wallet.balance.currency.id];
if (asset) {
wallet.balance = asset.balance;
}
});
});
}
function refreshTransactions() {
var txArray;
transactionLoadingService.loadTransactions(applicationContext.account.address)
.then(function (transactions) {
txArray = transactions;
return transactionLoadingService.refreshAssetCache(applicationContext.cache.assets, transactions);
})
.then(function () {
walletList.transactions = txArray;
});
}
/* AssetId substitution for testnet only.
Mainnet version uses default asset identifiers.
*/
function patchCurrencyIdsForTestnet() {
if ($scope.isTestnet()) {
Currency.EUR.id = '8zEZuJcKPQmFuYgVe5ZMpxgiPLu5zBhjA6xgdGomQDaP';
Currency.USD.id = '2aSqCbvCTgvCpwkGsk4mea4tCLG4Zgp69aQDhHNvRUZv';
Currency.CNY.id = 'D2MNuUyA38pSKoV7F7vpS15Uhw9nw5qfbrGUfCLRNuRo';
Currency.BTC.id = '7g151iXK8fyxB5sBUHkwQNXhVBuXdbK8ftPB3h1NrrYV';
Currency.UPC.id = '6MPKrD5B7GrfbciHECg1MwdvRUhRETApgNZspreBJ8JL';
}
}
}
WavesWalletListController.$inject = ['$scope', '$interval', 'wallet.events',
'applicationContext', 'apiService', 'transactionLoadingService'];
angular
.module('app.wallet')
.controller('walletListController', WavesWalletListController);
})();
| src/js/wallet/wallet.list.controller.js | (function () {
'use strict';
var DEFAULT_FEE_AMOUNT = '0.001';
function WavesWalletListController($scope, $interval, events, applicationContext,
apiService, transactionLoadingService) {
var walletList = this;
var refreshPromise;
var refreshDelay = 10 * 1000;
function unimplementedFeature() {
$scope.home.featureUnderDevelopment();
}
function findWalletByCurrency(currency) {
return _.find(walletList.wallets, function (w) {
return w.balance.currency === currency;
});
}
walletList.wallets = [
{
balance: new Money(0, Currency.UPC)
},
{
balance: new Money(0, Currency.BTC)
},
{
balance: new Money(0, Currency.WAV),
hidden: true
}
];
walletList.transactions = [];
walletList.send = send;
walletList.withdraw = withdraw;
walletList.trade = trade;
loadDataFromBackend();
patchCurrencyIdsForTestnet();
$scope.$on('$destroy', function () {
if (angular.isDefined(refreshPromise)) {
$interval.cancel(refreshPromise);
refreshPromise = undefined;
}
});
function send (currency) {
var assetWallet = findWalletByCurrency(currency);
var wavesWallet = findWalletByCurrency(Currency.WAV);
$scope.$broadcast(events.WALLET_SEND, {
assetBalance: assetWallet.balance,
wavesBalance: wavesWallet.balance
});
}
function withdraw (currency) {
unimplementedFeature();
}
function trade (currency) {
unimplementedFeature();
}
function loadDataFromBackend() {
refreshWallets();
refreshTransactions();
refreshPromise = $interval(function() {
refreshWallets();
refreshTransactions();
}, refreshDelay);
}
function refreshWallets() {
apiService.address.balance(applicationContext.account.address)
.then(function (response) {
var wavesWallet = findWalletByCurrency(Currency.WAV);
wavesWallet.balance = Money.fromCoins(response.balance, Currency.WAV);
});
apiService.assets.balance(applicationContext.account.address).then(function (response) {
_.forEach(response.balances, function (assetBalance) {
var id = assetBalance.assetId;
// adding asset details to cache
applicationContext.cache.assets.put(assetBalance.issueTransaction);
applicationContext.cache.assets.update(id, assetBalance.balance,
assetBalance.reissuable, assetBalance.quantity);
});
_.forEach(walletList.wallets, function (wallet) {
var asset = applicationContext.cache.assets[wallet.balance.currency.id];
if (asset) {
wallet.balance = asset.balance;
}
});
});
}
function refreshTransactions() {
var txArray;
transactionLoadingService.loadTransactions(applicationContext.account.address)
.then(function (transactions) {
txArray = transactions;
return transactionLoadingService.refreshAssetCache(applicationContext.cache.assets, transactions);
})
.then(function () {
walletList.transactions = txArray;
});
}
/* AssetId substitution for testnet only.
Mainnet version uses default asset identifiers.
*/
function patchCurrencyIdsForTestnet() {
if ($scope.isTestnet()) {
Currency.EUR.id = '8zEZuJcKPQmFuYgVe5ZMpxgiPLu5zBhjA6xgdGomQDaP';
Currency.USD.id = '2aSqCbvCTgvCpwkGsk4mea4tCLG4Zgp69aQDhHNvRUZv';
Currency.CNY.id = 'D2MNuUyA38pSKoV7F7vpS15Uhw9nw5qfbrGUfCLRNuRo';
Currency.BTC.id = '7g151iXK8fyxB5sBUHkwQNXhVBuXdbK8ftPB3h1NrrYV';
Currency.UPC.id = '2sn59CFYKBwQYGfZuzwGEJehieWpYnHZpfw6fNtpjUAx';
// Currency.UPC.id = '6MPKrD5B7GrfbciHECg1MwdvRUhRETApgNZspreBJ8JL';
}
}
}
WavesWalletListController.$inject = ['$scope', '$interval', 'wallet.events',
'applicationContext', 'apiService', 'transactionLoadingService'];
angular
.module('app.wallet')
.controller('walletListController', WavesWalletListController);
})();
| Fixed testnet Upcoin ID
| src/js/wallet/wallet.list.controller.js | Fixed testnet Upcoin ID | <ide><path>rc/js/wallet/wallet.list.controller.js
<ide> Currency.USD.id = '2aSqCbvCTgvCpwkGsk4mea4tCLG4Zgp69aQDhHNvRUZv';
<ide> Currency.CNY.id = 'D2MNuUyA38pSKoV7F7vpS15Uhw9nw5qfbrGUfCLRNuRo';
<ide> Currency.BTC.id = '7g151iXK8fyxB5sBUHkwQNXhVBuXdbK8ftPB3h1NrrYV';
<del> Currency.UPC.id = '2sn59CFYKBwQYGfZuzwGEJehieWpYnHZpfw6fNtpjUAx';
<del> // Currency.UPC.id = '6MPKrD5B7GrfbciHECg1MwdvRUhRETApgNZspreBJ8JL';
<add> Currency.UPC.id = '6MPKrD5B7GrfbciHECg1MwdvRUhRETApgNZspreBJ8JL';
<ide> }
<ide> }
<ide> } |
|
Java | bsd-3-clause | a321484851dc2c392f6c593938fe88752edb802c | 0 | NCIP/cagrid,NCIP/cagrid,NCIP/cagrid,NCIP/cagrid | package gov.nih.nci.cabig.introduce;
import gov.nih.nci.cabig.introduce.steps.AddSimpleMethodStep;
import gov.nih.nci.cabig.introduce.steps.AddSimpleMethodWithFaultStep;
import gov.nih.nci.cabig.introduce.steps.CreateSkeletonStep;
import gov.nih.nci.cabig.introduce.steps.RemoveMethodStep;
import gov.nih.nci.cabig.introduce.steps.RemoveSkeletonStep;
import gov.nih.nci.cabig.introduce.steps.RollBackStep;
import java.util.Vector;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import com.atomicobject.haste.framework.Story;
public class SyncToolsTest extends Story {
private TestCaseInfo tci;
protected Vector steps() {
this.tci = new TestCaseInfo();
Vector steps = new Vector();
steps.add(new CreateSkeletonStep(tci));
steps.add(new AddSimpleMethodStep(tci, "newMethod"));
steps.add(new AddSimpleMethodWithFaultStep(tci,"newMethodWithFault"));
//steps.add(new RemoveMethodStep(tci, "newMethod"));
//steps.add(new AddSimpleMethodStep(tci,"newMethod2"));
//steps.add(new AddSimpleMethodStep(tci,"newMethod"));
//steps.add(new RollBackStep(tci));
return steps;
}
public String getDescription() {
return "Tests the code generation tools";
}
protected void storyTearDown() throws Throwable {
RemoveSkeletonStep step = new RemoveSkeletonStep(tci);
step.runStep();
}
// used to make sure that if we are going to use a junit testsuite to test this
// that the test suite will not error out looking for a single test......
public void testDummy() throws Throwable {
}
/**
* Convenience method for running all the Steps in this Story.
*/
public static void main(String args[]) {
TestRunner runner = new TestRunner();
TestResult result = runner.doRun(new TestSuite(SyncToolsTest.class));
System.exit(result.errorCount() + result.failureCount());
}
}
| cagrid-1-0/caGrid/projects/introduce/test/src/java/Introduce/gov/nih/nci/cabig/introduce/SyncToolsTest.java | package gov.nih.nci.cabig.introduce;
import gov.nih.nci.cabig.introduce.steps.AddSimpleMethodStep;
import gov.nih.nci.cabig.introduce.steps.AddSimpleMethodWithFaultStep;
import gov.nih.nci.cabig.introduce.steps.CreateSkeletonStep;
import gov.nih.nci.cabig.introduce.steps.RemoveMethodStep;
import gov.nih.nci.cabig.introduce.steps.RemoveSkeletonStep;
import gov.nih.nci.cabig.introduce.steps.RollBackStep;
import java.util.Vector;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import com.atomicobject.haste.framework.Story;
public class SyncToolsTest extends Story {
private TestCaseInfo tci;
protected Vector steps() {
this.tci = new TestCaseInfo();
Vector steps = new Vector();
steps.add(new CreateSkeletonStep(tci));
steps.add(new AddSimpleMethodStep(tci, "newMethod"));
steps.add(new AddSimpleMethodWithFaultStep(tci,"newMethodWithFault"));
steps.add(new RemoveMethodStep(tci, "newMethod"));
steps.add(new AddSimpleMethodStep(tci,"newMethod2"));
//steps.add(new AddSimpleMethodStep(tci,"newMethod"));
//steps.add(new RollBackStep(tci));
return steps;
}
public String getDescription() {
return "Tests the code generation tools";
}
protected void storyTearDown() throws Throwable {
RemoveSkeletonStep step = new RemoveSkeletonStep(tci);
step.runStep();
}
// used to make sure that if we are going to use a junit testsuite to test this
// that the test suite will not error out looking for a single test......
public void testDummy() throws Throwable {
}
/**
* Convenience method for running all the Steps in this Story.
*/
public static void main(String args[]) {
TestRunner runner = new TestRunner();
TestResult result = runner.doRun(new TestSuite(SyncToolsTest.class));
System.exit(result.errorCount() + result.failureCount());
}
}
| *** empty log message ***
| cagrid-1-0/caGrid/projects/introduce/test/src/java/Introduce/gov/nih/nci/cabig/introduce/SyncToolsTest.java | *** empty log message *** | <ide><path>agrid-1-0/caGrid/projects/introduce/test/src/java/Introduce/gov/nih/nci/cabig/introduce/SyncToolsTest.java
<ide> steps.add(new CreateSkeletonStep(tci));
<ide> steps.add(new AddSimpleMethodStep(tci, "newMethod"));
<ide> steps.add(new AddSimpleMethodWithFaultStep(tci,"newMethodWithFault"));
<del> steps.add(new RemoveMethodStep(tci, "newMethod"));
<del> steps.add(new AddSimpleMethodStep(tci,"newMethod2"));
<add> //steps.add(new RemoveMethodStep(tci, "newMethod"));
<add> //steps.add(new AddSimpleMethodStep(tci,"newMethod2"));
<ide> //steps.add(new AddSimpleMethodStep(tci,"newMethod"));
<ide> //steps.add(new RollBackStep(tci));
<ide> return steps; |
|
Java | apache-2.0 | e329280c5da5396265d891cfdc82daa51baa97fd | 0 | bazelbuild/bazel-buildfarm,bazelbuild/bazel-buildfarm,bazelbuild/bazel-buildfarm,bazelbuild/bazel-buildfarm,bazelbuild/bazel-buildfarm,bazelbuild/bazel-buildfarm,bazelbuild/bazel-buildfarm | // Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package build.buildfarm.worker;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static build.buildfarm.v1test.ExecutionPolicy.PolicyCase.WRAPPER;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.logging.Level.SEVERE;
import build.bazel.remote.execution.v2.ActionResult;
import build.bazel.remote.execution.v2.Command;
import build.bazel.remote.execution.v2.ExecuteOperationMetadata;
import build.bazel.remote.execution.v2.ExecuteResponse;
import build.bazel.remote.execution.v2.ExecutionStage;
import build.bazel.remote.execution.v2.Platform;
import build.bazel.remote.execution.v2.Platform.Property;
import build.buildfarm.common.Write;
import build.buildfarm.common.Write.NullWrite;
import build.buildfarm.v1test.ExecutingOperationMetadata;
import build.buildfarm.v1test.ExecutionPolicy;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.SettableFuture;
import com.google.longrunning.Operation;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Duration;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.Timestamps;
import com.google.rpc.Code;
import io.grpc.Deadline;
import java.nio.file.Path;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
class Executor implements Runnable {
private static final int INCOMPLETE_EXIT_CODE = -1;
private static final Logger logger = Logger.getLogger(Executor.class.getName());
private final WorkerContext workerContext;
private final OperationContext operationContext;
private final ExecuteActionStage owner;
private int exitCode = INCOMPLETE_EXIT_CODE;
Executor(WorkerContext workerContext, OperationContext operationContext, ExecuteActionStage owner) {
this.workerContext = workerContext;
this.operationContext = operationContext;
this.owner = owner;
}
private long runInterruptible(Stopwatch stopwatch) throws InterruptedException {
ExecuteOperationMetadata metadata;
try {
metadata = operationContext.operation
.getMetadata().unpack(ExecuteOperationMetadata.class);
} catch (InvalidProtocolBufferException e) {
logger.log(SEVERE, "invalid execute operation metadata", e);
return 0;
}
ExecuteOperationMetadata executingMetadata = metadata.toBuilder()
.setStage(ExecutionStage.Value.EXECUTING)
.build();
long startedAt = System.currentTimeMillis();
Operation operation = operationContext.operation.toBuilder()
.setMetadata(Any.pack(ExecutingOperationMetadata.newBuilder()
.setStartedAt(startedAt)
.setExecutingOn(workerContext.getName())
.setExecuteOperationMetadata(executingMetadata)
.setRequestMetadata(operationContext.queueEntry.getExecuteEntry().getRequestMetadata())
.build()))
.build();
boolean operationUpdateSuccess = false;
try {
operationUpdateSuccess = workerContext.putOperation(operation, operationContext.action);
} catch (IOException e) {
logger.log(SEVERE, format("error putting operation %s as EXECUTING", operation.getName()), e);
}
if (!operationUpdateSuccess) {
logger.warning(
String.format(
"Executor::run(%s): could not transition to EXECUTING",
operation.getName()));
owner.error().put(operationContext);
return 0;
}
Duration timeout;
if (operationContext.action.hasTimeout()) {
timeout = operationContext.action.getTimeout();
} else {
timeout = null;
}
if (timeout == null && workerContext.hasDefaultActionTimeout()) {
timeout = workerContext.getDefaultActionTimeout();
}
Deadline pollDeadline;
if (timeout == null) {
pollDeadline = Deadline.after(10, DAYS);
} else {
pollDeadline = Deadline.after(
// 10s of padding for the timeout in question, so that we can guarantee cleanup
(timeout.getSeconds() + 10) * 1000000 + timeout.getNanos() / 1000,
MICROSECONDS);
}
workerContext.resumePoller(
operationContext.poller,
"Executor",
operationContext.queueEntry,
ExecutionStage.Value.EXECUTING,
Thread.currentThread()::interrupt,
pollDeadline);
try {
return executePolled(operation, timeout, stopwatch);
} finally {
operationContext.poller.pause();
}
}
private long executePolled(
Operation operation,
Duration timeout,
Stopwatch stopwatch) throws InterruptedException {
/* execute command */
workerContext.logInfo("Executor: Operation " + operation.getName() + " Executing command");
Platform platform = operationContext.command.getPlatform();
ImmutableList.Builder<ExecutionPolicy> policies = ImmutableList.builder();
ExecutionPolicy defaultPolicy = workerContext.getExecutionPolicy("");
if (defaultPolicy != null) {
policies.add(defaultPolicy);
}
for (Property property : platform.getPropertiesList()) {
if (property.getName().equals("execution-policy")) {
policies.add(workerContext.getExecutionPolicy(property.getValue()));
}
}
ActionResult.Builder resultBuilder = operationContext.executeResponse
.getResultBuilder();
resultBuilder.getExecutionMetadataBuilder()
.setExecutionStartTimestamp(Timestamps.fromMillis(System.currentTimeMillis()));
Code statusCode;
try {
statusCode = executeCommand(
operation.getName(),
operationContext.execDir,
operationContext.command,
timeout,
"", // executingMetadata.getStdoutStreamName(),
"", // executingMetadata.getStderrStreamName(),
resultBuilder,
policies.build());
} catch (IOException e) {
logger.log(SEVERE, "error executing operation " + operation.getName(), e);
operationContext.poller.pause();
owner.error().put(operationContext);
return 0;
}
resultBuilder.getExecutionMetadataBuilder()
.setExecutionCompletedTimestamp(Timestamps.fromMillis(System.currentTimeMillis()));
long executeUSecs = stopwatch.elapsed(MICROSECONDS);
logger.info(
String.format(
"Executor::executeCommand(%s): Completed command: exit code %d",
operation.getName(),
resultBuilder.getExitCode()));
operationContext.executeResponse.getStatusBuilder()
.setCode(statusCode.getNumber());
OperationContext reportOperationContext = operationContext.toBuilder()
.setOperation(operation)
.build();
boolean claimed = owner.output().claim();
operationContext.poller.pause();
if (claimed) {
try {
owner.output().put(reportOperationContext);
} catch (InterruptedException e) {
owner.output().release();
throw e;
}
} else {
// FIXME we need to release the action root
workerContext.logInfo("Executor: Operation " + operation.getName() + " Failed to claim output");
owner.error().put(operationContext);
}
return stopwatch.elapsed(MICROSECONDS) - executeUSecs;
}
@Override
public void run() {
long stallUSecs = 0;
Stopwatch stopwatch = Stopwatch.createStarted();
String operationName = operationContext.operation.getName();
try {
stallUSecs = runInterruptible(stopwatch);
} catch (InterruptedException e) {
/* we can be interrupted when the poller fails */
try {
owner.error().put(operationContext);
} catch (InterruptedException errorEx) {
logger.log(SEVERE, "interrupted while erroring " + operationName, errorEx);
} finally {
Thread.currentThread().interrupt();
}
} catch (Exception e) {
logger.log(SEVERE, "errored during execution of " + operationName, e);
try {
owner.error().put(operationContext);
} catch (InterruptedException errorEx) {
logger.log(SEVERE, format("interrupted while erroring %s after error", operationName), errorEx);
} catch (Throwable t) {
logger.log(SEVERE, format("errored while erroring %s after error", operationName), t);
}
throw e;
} finally {
boolean wasInterrupted = Thread.interrupted();
try {
owner.releaseExecutor(
operationName,
stopwatch.elapsed(MICROSECONDS),
stallUSecs,
exitCode);
} finally {
if (wasInterrupted) {
Thread.currentThread().interrupt();
}
}
}
}
private Code executeCommand(
String operationName,
Path execDir,
Command command,
Duration timeout,
String stdoutStreamName,
String stderrStreamName,
ActionResult.Builder resultBuilder,
Iterable<ExecutionPolicy> policies)
throws IOException, InterruptedException {
ImmutableList.Builder<String> arguments = ImmutableList.builder();
arguments.addAll(
transform(
filter(policies, (policy) -> policy.getPolicyCase() == WRAPPER),
(policy) -> policy.getWrapper().getPath()));
arguments.addAll(command.getArgumentsList());
ProcessBuilder processBuilder =
new ProcessBuilder(arguments.build())
.directory(execDir.toAbsolutePath().toFile());
Map<String, String> environment = processBuilder.environment();
environment.clear();
for (Command.EnvironmentVariable environmentVariable : command.getEnvironmentVariablesList()) {
environment.put(environmentVariable.getName(), environmentVariable.getValue());
}
final Write stdoutWrite, stderrWrite;
if (stdoutStreamName != null && !stdoutStreamName.isEmpty() && workerContext.getStreamStdout()) {
stdoutWrite = workerContext.getOperationStreamWrite(stdoutStreamName);
} else {
stdoutWrite = new NullWrite();
}
if (stderrStreamName != null && !stderrStreamName.isEmpty() && workerContext.getStreamStderr()) {
stderrWrite = workerContext.getOperationStreamWrite(stderrStreamName);
} else {
stderrWrite = new NullWrite();
}
long startNanoTime = System.nanoTime();
Process process;
try {
synchronized (this) {
process = processBuilder.start();
}
process.getOutputStream().close();
} catch(IOException e) {
logger.log(SEVERE, "error starting process for " + operationName, e);
// again, should we do something else here??
resultBuilder.setExitCode(INCOMPLETE_EXIT_CODE);
return Code.INVALID_ARGUMENT;
}
stdoutWrite.reset();
stderrWrite.reset();
ByteStringWriteReader stdoutReader = new ByteStringWriteReader(
process.getInputStream(), stdoutWrite);
ByteStringWriteReader stderrReader = new ByteStringWriteReader(
process.getErrorStream(), stderrWrite);
Thread stdoutReaderThread = new Thread(stdoutReader);
Thread stderrReaderThread = new Thread(stderrReader);
stdoutReaderThread.start();
stderrReaderThread.start();
Code statusCode = Code.OK;
try {
if (timeout == null) {
exitCode = process.waitFor();
} else {
long timeoutNanos = timeout.getSeconds() * 1000000000L + timeout.getNanos();
long remainingNanoTime = timeoutNanos - (System.nanoTime() - startNanoTime);
if (process.waitFor(remainingNanoTime, TimeUnit.NANOSECONDS)) {
exitCode = process.exitValue();
} else {
logger.info("process timed out for " + operationName);
process.destroy();
if (!process.waitFor(1, TimeUnit.SECONDS)) {
logger.info(format("process did not respond to termination for %s, killing it", operationName));
process.destroyForcibly();
process.waitFor(100, TimeUnit.MILLISECONDS); // fair trade, i think
}
statusCode = Code.DEADLINE_EXCEEDED;
}
}
} catch (InterruptedException e) {
process.destroy();
if (!process.waitFor(1, TimeUnit.SECONDS)) {
process.destroyForcibly();
process.waitFor(100, TimeUnit.MILLISECONDS);
}
throw e;
}
stdoutReaderThread.join();
stderrReaderThread.join();
resultBuilder
.setExitCode(exitCode)
.setStdoutRaw(stdoutReader.getData())
.setStderrRaw(stderrReader.getData());
return statusCode;
}
}
| src/main/java/build/buildfarm/worker/Executor.java | // Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package build.buildfarm.worker;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static build.buildfarm.v1test.ExecutionPolicy.PolicyCase.WRAPPER;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.logging.Level.SEVERE;
import build.bazel.remote.execution.v2.ActionResult;
import build.bazel.remote.execution.v2.Command;
import build.bazel.remote.execution.v2.ExecuteOperationMetadata;
import build.bazel.remote.execution.v2.ExecuteResponse;
import build.bazel.remote.execution.v2.ExecutionStage;
import build.bazel.remote.execution.v2.Platform;
import build.bazel.remote.execution.v2.Platform.Property;
import build.buildfarm.common.Write;
import build.buildfarm.common.Write.NullWrite;
import build.buildfarm.v1test.ExecutingOperationMetadata;
import build.buildfarm.v1test.ExecutionPolicy;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.SettableFuture;
import com.google.longrunning.Operation;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Duration;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.Timestamps;
import com.google.rpc.Code;
import io.grpc.Deadline;
import java.nio.file.Path;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
class Executor implements Runnable {
private static final int INCOMPLETE_EXIT_CODE = -1;
private static final Logger logger = Logger.getLogger(Executor.class.getName());
private final WorkerContext workerContext;
private final OperationContext operationContext;
private final ExecuteActionStage owner;
private int exitCode = INCOMPLETE_EXIT_CODE;
Executor(WorkerContext workerContext, OperationContext operationContext, ExecuteActionStage owner) {
this.workerContext = workerContext;
this.operationContext = operationContext;
this.owner = owner;
}
private long runInterruptible(Stopwatch stopwatch) throws InterruptedException {
ExecuteOperationMetadata metadata;
try {
metadata = operationContext.operation
.getMetadata().unpack(ExecuteOperationMetadata.class);
} catch (InvalidProtocolBufferException e) {
logger.log(SEVERE, "invalid execute operation metadata", e);
return 0;
}
ExecuteOperationMetadata executingMetadata = metadata.toBuilder()
.setStage(ExecutionStage.Value.EXECUTING)
.build();
long startedAt = System.currentTimeMillis();
Operation operation = operationContext.operation.toBuilder()
.setMetadata(Any.pack(ExecutingOperationMetadata.newBuilder()
.setStartedAt(startedAt)
.setExecutingOn(workerContext.getName())
.setExecuteOperationMetadata(executingMetadata)
.setRequestMetadata(operationContext.queueEntry.getExecuteEntry().getRequestMetadata())
.build()))
.build();
boolean operationUpdateSuccess = false;
try {
operationUpdateSuccess = workerContext.putOperation(operation, operationContext.action);
} catch (IOException e) {
logger.log(SEVERE, format("error putting operation %s as EXECUTING", operation.getName()), e);
}
if (!operationUpdateSuccess) {
logger.warning(
String.format(
"Executor::run(%s): could not transition to EXECUTING",
operation.getName()));
try {
workerContext.destroyExecDir(operationContext.execDir);
} catch (IOException e) {
logger.log(SEVERE, "error while destroying " + operationContext.execDir, e);
}
owner.error().put(operationContext);
return 0;
}
Duration timeout;
if (operationContext.action.hasTimeout()) {
timeout = operationContext.action.getTimeout();
} else {
timeout = null;
}
if (timeout == null && workerContext.hasDefaultActionTimeout()) {
timeout = workerContext.getDefaultActionTimeout();
}
Deadline pollDeadline;
if (timeout == null) {
pollDeadline = Deadline.after(10, DAYS);
} else {
pollDeadline = Deadline.after(
// 10s of padding for the timeout in question, so that we can guarantee cleanup
(timeout.getSeconds() + 10) * 1000000 + timeout.getNanos() / 1000,
MICROSECONDS);
}
workerContext.resumePoller(
operationContext.poller,
"Executor",
operationContext.queueEntry,
ExecutionStage.Value.EXECUTING,
Thread.currentThread()::interrupt,
pollDeadline);
try {
return executePolled(operation, timeout, stopwatch);
} finally {
operationContext.poller.pause();
}
}
private long executePolled(
Operation operation,
Duration timeout,
Stopwatch stopwatch) throws InterruptedException {
/* execute command */
workerContext.logInfo("Executor: Operation " + operation.getName() + " Executing command");
Platform platform = operationContext.command.getPlatform();
ImmutableList.Builder<ExecutionPolicy> policies = ImmutableList.builder();
ExecutionPolicy defaultPolicy = workerContext.getExecutionPolicy("");
if (defaultPolicy != null) {
policies.add(defaultPolicy);
}
for (Property property : platform.getPropertiesList()) {
if (property.getName().equals("execution-policy")) {
policies.add(workerContext.getExecutionPolicy(property.getValue()));
}
}
ActionResult.Builder resultBuilder = operationContext.executeResponse
.getResultBuilder();
resultBuilder.getExecutionMetadataBuilder()
.setExecutionStartTimestamp(Timestamps.fromMillis(System.currentTimeMillis()));
Code statusCode;
try {
statusCode = executeCommand(
operation.getName(),
operationContext.execDir,
operationContext.command,
timeout,
"", // executingMetadata.getStdoutStreamName(),
"", // executingMetadata.getStderrStreamName(),
resultBuilder,
policies.build());
} catch (IOException e) {
logger.log(SEVERE, "error executing operation " + operation.getName(), e);
operationContext.poller.pause();
owner.error().put(operationContext);
return 0;
}
resultBuilder.getExecutionMetadataBuilder()
.setExecutionCompletedTimestamp(Timestamps.fromMillis(System.currentTimeMillis()));
long executeUSecs = stopwatch.elapsed(MICROSECONDS);
logger.info(
String.format(
"Executor::executeCommand(%s): Completed command: exit code %d",
operation.getName(),
resultBuilder.getExitCode()));
operationContext.executeResponse.getStatusBuilder()
.setCode(statusCode.getNumber());
OperationContext reportOperationContext = operationContext.toBuilder()
.setOperation(operation)
.build();
boolean claimed = owner.output().claim();
operationContext.poller.pause();
if (claimed) {
try {
owner.output().put(reportOperationContext);
} catch (InterruptedException e) {
owner.output().release();
throw e;
}
} else {
// FIXME we need to release the action root
workerContext.logInfo("Executor: Operation " + operation.getName() + " Failed to claim output");
owner.error().put(operationContext);
}
return stopwatch.elapsed(MICROSECONDS) - executeUSecs;
}
@Override
public void run() {
long stallUSecs = 0;
Stopwatch stopwatch = Stopwatch.createStarted();
String operationName = operationContext.operation.getName();
try {
stallUSecs = runInterruptible(stopwatch);
} catch (InterruptedException e) {
/* we can be interrupted when the poller fails */
try {
owner.error().put(operationContext);
} catch (InterruptedException errorEx) {
logger.log(SEVERE, "interrupted while erroring " + operationName, errorEx);
} finally {
Thread.currentThread().interrupt();
}
} catch (Exception e) {
logger.log(SEVERE, "errored during execution of " + operationName, e);
try {
owner.error().put(operationContext);
} catch (InterruptedException errorEx) {
logger.log(SEVERE, format("interrupted while erroring %s after error", operationName), errorEx);
} catch (Throwable t) {
logger.log(SEVERE, format("errored while erroring %s after error", operationName), t);
}
throw e;
} finally {
boolean wasInterrupted = Thread.interrupted();
try {
owner.releaseExecutor(
operationName,
stopwatch.elapsed(MICROSECONDS),
stallUSecs,
exitCode);
} finally {
if (wasInterrupted) {
Thread.currentThread().interrupt();
}
}
}
}
private Code executeCommand(
String operationName,
Path execDir,
Command command,
Duration timeout,
String stdoutStreamName,
String stderrStreamName,
ActionResult.Builder resultBuilder,
Iterable<ExecutionPolicy> policies)
throws IOException, InterruptedException {
ImmutableList.Builder<String> arguments = ImmutableList.builder();
arguments.addAll(
transform(
filter(policies, (policy) -> policy.getPolicyCase() == WRAPPER),
(policy) -> policy.getWrapper().getPath()));
arguments.addAll(command.getArgumentsList());
ProcessBuilder processBuilder =
new ProcessBuilder(arguments.build())
.directory(execDir.toAbsolutePath().toFile());
Map<String, String> environment = processBuilder.environment();
environment.clear();
for (Command.EnvironmentVariable environmentVariable : command.getEnvironmentVariablesList()) {
environment.put(environmentVariable.getName(), environmentVariable.getValue());
}
final Write stdoutWrite, stderrWrite;
if (stdoutStreamName != null && !stdoutStreamName.isEmpty() && workerContext.getStreamStdout()) {
stdoutWrite = workerContext.getOperationStreamWrite(stdoutStreamName);
} else {
stdoutWrite = new NullWrite();
}
if (stderrStreamName != null && !stderrStreamName.isEmpty() && workerContext.getStreamStderr()) {
stderrWrite = workerContext.getOperationStreamWrite(stderrStreamName);
} else {
stderrWrite = new NullWrite();
}
long startNanoTime = System.nanoTime();
Process process;
try {
synchronized (this) {
process = processBuilder.start();
}
process.getOutputStream().close();
} catch(IOException e) {
logger.log(SEVERE, "error starting process for " + operationName, e);
// again, should we do something else here??
resultBuilder.setExitCode(INCOMPLETE_EXIT_CODE);
return Code.INVALID_ARGUMENT;
}
stdoutWrite.reset();
stderrWrite.reset();
ByteStringWriteReader stdoutReader = new ByteStringWriteReader(
process.getInputStream(), stdoutWrite);
ByteStringWriteReader stderrReader = new ByteStringWriteReader(
process.getErrorStream(), stderrWrite);
Thread stdoutReaderThread = new Thread(stdoutReader);
Thread stderrReaderThread = new Thread(stderrReader);
stdoutReaderThread.start();
stderrReaderThread.start();
Code statusCode = Code.OK;
try {
if (timeout == null) {
exitCode = process.waitFor();
} else {
long timeoutNanos = timeout.getSeconds() * 1000000000L + timeout.getNanos();
long remainingNanoTime = timeoutNanos - (System.nanoTime() - startNanoTime);
if (process.waitFor(remainingNanoTime, TimeUnit.NANOSECONDS)) {
exitCode = process.exitValue();
} else {
logger.info("process timed out for " + operationName);
process.destroy();
if (!process.waitFor(1, TimeUnit.SECONDS)) {
logger.info(format("process did not respond to termination for %s, killing it", operationName));
process.destroyForcibly();
process.waitFor(100, TimeUnit.MILLISECONDS); // fair trade, i think
}
statusCode = Code.DEADLINE_EXCEEDED;
}
}
} catch (InterruptedException e) {
process.destroy();
if (!process.waitFor(1, TimeUnit.SECONDS)) {
process.destroyForcibly();
process.waitFor(100, TimeUnit.MILLISECONDS);
}
throw e;
}
stdoutReaderThread.join();
stderrReaderThread.join();
resultBuilder
.setExitCode(exitCode)
.setStdoutRaw(stdoutReader.getData())
.setStderrRaw(stderrReader.getData());
return statusCode;
}
}
| Remove duplicated exec dir destroy in executor (#292)
The removal of exec dirs is automatically handled through
ExecuteActionStage's error path, the Executor does not need to perform
it directly. | src/main/java/build/buildfarm/worker/Executor.java | Remove duplicated exec dir destroy in executor (#292) | <ide><path>rc/main/java/build/buildfarm/worker/Executor.java
<ide> String.format(
<ide> "Executor::run(%s): could not transition to EXECUTING",
<ide> operation.getName()));
<del> try {
<del> workerContext.destroyExecDir(operationContext.execDir);
<del> } catch (IOException e) {
<del> logger.log(SEVERE, "error while destroying " + operationContext.execDir, e);
<del> }
<ide> owner.error().put(operationContext);
<ide> return 0;
<ide> } |
|
JavaScript | mit | 882967652b66d0220d67f67280f48bc511da6b4d | 0 | cblanc/postcodes.io,ideal-postcodes/postcodes.io,cblanc/postcodes.io,cblanc/postcodes.io,goingdotin/postcodes.io,goingdotin/postcodes.io,ideal-postcodes/postcodes.io,goingdotin/postcodes.io,ideal-postcodes/postcodes.io | "use strict";
const path = require("path");
const async = require("async");
const assert = require("chai").assert;
const helper = require(`${__dirname}/helper`);
const Place = helper.Place;
describe("Place Model", () => {
let testPostcode, testOutcode;
before(function (done) {
this.timeout(0);
async.series([
helper.clearPostcodeDb,
helper.seedPostcodeDb
], done);
});
after(helper.clearPostcodeDb);
describe("#findByCode", () => {
const testCode = "osgb4000000074559125";
it ("returns place by code", done => {
Place.findByCode(testCode, (error, result) => {
if (error) return done(error);
helper.isRawPlaceObject(result);
done();
});
});
it ("is case insensitive", done => {
Place.findByCode(testCode.toUpperCase(), (error, result) => {
if (error) return done(error);
helper.isRawPlaceObject(result);
done();
});
});
it ("returns null if no match", done => {
Place.findByCode("12", (error, result) => {
if (error) return done(error);
assert.isNull(result);
done();
});
});
it ("returns null if code not string", done => {
Place.findByCode(12, (error, result) => {
if (error) return done(error);
assert.isNull(result);
done();
});
});
});
describe("#search", () => {
it ("returns a list of places for given search term", done => {
Place.search({ name: "b" }, (error, results) => {
if (error) return done(error);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("returns null if no query", done => {
Place.search({}, (error, results) => {
if (error) return done(error);
assert.isNull(results);
done();
});
});
it ("is sensitive to limit", done => {
Place.search({
name: "b",
limit: 1
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("returns up to 10 results by default", done => {
Place.search({ name: "b" }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 10);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("uses default limit if invalid limit supplied", done => {
Place.search({
name: "b",
limit: -1
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 10);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("searches with name_2", done => {
const name = "East Kilbride";
Place.search({ name: name }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_2, name);
done();
});
});
describe("result specs", () => {
it ("returns names with apostrophes", done => {
const name = "Taobh a' Chaolais";
Place.search({
name: name.replace(/'/g, "")
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("returns names with non-ascii characters", done => {
const name = "Mynydd-llêch";
Place.search({
name: name.replace("ê", "e")
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("returns names with hyphens", done => {
const name = "Llwyn-y-groes";
Place.search({
name: name.replace(/-/g, " ")
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
});
describe("query specs", () => {
it ("is case insensitive", done => {
const name = "Corston";
Place.search({
name: name.toUpperCase()
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("handles apostrophes", done => {
const name = "Taobh a' Chaolais";
Place.search({
name: name
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("handles non-ascii characters", done => {
const name = "Mynydd-llêch";
Place.search({ name: name }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("handles hyphens as spaces", done => {
const name = "Llwyn-y-groes";
Place.search({ name: name }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
});
});
describe("toJson", () => {
it ("formats place object for public consumption", done => {
const testCode = "osgb4000000074559125";
Place.findByCode(testCode, (error, place) => {
if (error) return done(error);
helper.isRawPlaceObject(place);
const formatted = Place.toJson(place);
helper.isPlaceObject(formatted);
done();
});
});
});
});
| tests/place.unit.js | "use strict";
const path = require("path");
const async = require("async");
const assert = require("chai").assert;
const helper = require(`${__dirname}/helper`);
const Place = helper.Place;
describe("Place Model", () => {
let testPostcode, testOutcode;
before(function (done) {
this.timeout(0);
async.series([
helper.clearPostcodeDb,
helper.seedPostcodeDb
], done);
});
after(helper.clearPostcodeDb);
describe("#findByCode", () => {
const testCode = "osgb4000000074559125";
it ("returns place by code", done => {
Place.findByCode(testCode, (error, result) => {
if (error) return done(error);
helper.isRawPlaceObject(result);
done();
});
});
it ("is case insensitive", done => {
Place.findByCode(testCode.toUpperCase(), (error, result) => {
if (error) return done(error);
helper.isRawPlaceObject(result);
done();
});
});
it ("returns null if no match", done => {
Place.findByCode("12", (error, result) => {
if (error) return done(error);
assert.isNull(result);
done();
});
});
it ("returns null if code not string", done => {
Place.findByCode(12, (error, result) => {
if (error) return done(error);
assert.isNull(result);
done();
});
});
});
describe("#search", () => {
it ("returns a list of places for given search term", done => {
Place.search({ name: "b" }, (error, results) => {
if (error) return done(error);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("returns null if no query", done => {
Place.search({}, (error, results) => {
if (error) return done(error);
assert.isNull(results);
done();
});
});
it ("is sensitive to limit", done => {
Place.search({
name: "b",
limit: 1
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("returns up to 10 results by default", done => {
Place.search({ name: "b" }, (error, results) => {
if (error) return done(error);
assert.isTrue(results.length < 10);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("uses default limit if invalid limit supplied", done => {
Place.search({
name: "b",
limit: -1
}, (error, results) => {
if (error) return done(error);
assert.isTrue(results.length < 10);
results.forEach(helper.isRawPlaceObject);
done();
});
});
it ("searches with name_2", done => {
const name = "East Kilbride";
Place.search({ name: name }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_2, name);
done();
});
});
describe("result specs", () => {
it ("returns names with apostrophes", done => {
const name = "Taobh a' Chaolais";
Place.search({
name: name.replace(/'/g, "")
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("returns names with non-ascii characters", done => {
const name = "Mynydd-llêch";
Place.search({
name: name.replace("ê", "e")
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("returns names with hyphens", done => {
const name = "Llwyn-y-groes";
Place.search({
name: name.replace(/-/g, " ")
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
});
describe("query specs", () => {
it ("is case insensitive", done => {
const name = "Corston";
Place.search({
name: name.toUpperCase()
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("handles apostrophes", done => {
const name = "Taobh a' Chaolais";
Place.search({
name: name
}, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("handles non-ascii characters", done => {
const name = "Mynydd-llêch";
Place.search({ name: name }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
it ("handles hyphens as spaces", done => {
const name = "Llwyn-y-groes";
Place.search({ name: name }, (error, results) => {
if (error) return done(error);
assert.equal(results.length, 1);
results.forEach(helper.isRawPlaceObject);
assert.equal(results[0].name_1, name);
done();
});
});
});
});
describe("toJson", () => {
it ("formats place object for public consumption", done => {
const testCode = "osgb4000000074559125";
Place.findByCode(testCode, (error, place) => {
if (error) return done(error);
helper.isRawPlaceObject(place);
const formatted = Place.toJson(place);
helper.isPlaceObject(formatted);
done();
});
});
});
});
| Fix tests
| tests/place.unit.js | Fix tests | <ide><path>ests/place.unit.js
<ide> it ("returns up to 10 results by default", done => {
<ide> Place.search({ name: "b" }, (error, results) => {
<ide> if (error) return done(error);
<del> assert.isTrue(results.length < 10);
<add> assert.equal(results.length, 10);
<ide> results.forEach(helper.isRawPlaceObject);
<ide> done();
<ide> });
<ide> limit: -1
<ide> }, (error, results) => {
<ide> if (error) return done(error);
<del> assert.isTrue(results.length < 10);
<add> assert.equal(results.length, 10);
<ide> results.forEach(helper.isRawPlaceObject);
<ide> done();
<ide> }); |
|
Java | apache-2.0 | 8051f649426fedfc0918575975a44ccdf946445d | 0 | kingsleyadio/android_commons,kingsleyadio/android_commons | lib_appcommons/src/main/java/ng/kingsley/android/helper/PersistenceHelper.java | package ng.kingsley.android.helper;
import android.app.Application;
import android.content.Context;
import android.content.SharedPreferences;
import android.text.TextUtils;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* @author ADIO Kingsley O.
* @since 13 Aug, 2015
*/
@Singleton
public class PersistenceHelper {
private final Context mContext;
private final Gson GSON;
@Inject
public PersistenceHelper(Application context, Gson gson) {
mContext = context;
GSON = gson;
}
private static String getName(Context context, String domain) {
if (TextUtils.isEmpty(domain)) {
return context.getPackageName();
}
return context.getPackageName() + "_" + domain;
}
public <T> void persist(String domain, String key, T object) {
SharedPreferences.Editor editor = mContext.getSharedPreferences(getName(mContext, domain),
Context.MODE_PRIVATE).edit();
String string = GSON.toJson(object, new TypeToken<T>() {
}.getType());
editor.putString(key, string);
editor.apply();
}
public <T> void persist(String key, T object) {
persist(null, key, object);
}
public <T> T retrieve(String domain, String key, Class<T> clas) {
SharedPreferences pref = mContext.getSharedPreferences(getName(mContext, domain), Context.MODE_PRIVATE);
String object = pref.getString(key, null);
return GSON.fromJson(object, clas);
}
public <T> T retrieve(String key, Class<T> clas) {
return retrieve(null, key, clas);
}
public <T> T retrieve(String domain, String key, Class<T> clas, T defaultValue) {
T object = retrieve(domain, key, clas);
return (object == null) ? defaultValue : object;
}
public <T> T retrieve(String key, Class<T> clas, T defaultValue) {
return retrieve(null, key, clas, defaultValue);
}
public <T> T retrieve(String domain, String key, Type type) {
SharedPreferences pref = mContext.getSharedPreferences(getName(mContext, domain), Context.MODE_PRIVATE);
String object = pref.getString(key, null);
return GSON.fromJson(object, type);
}
public <T> T retrieve(String key, Type type) {
return retrieve(null, key, type);
}
public <T> T retrieve(String domain, String key, Type type, T defaultValue) {
T object = retrieve(domain, key, type);
return (object == null) ? defaultValue : object;
}
public <T> T retrieve(String key, Type type, T defaultValue) {
return retrieve(null, key, type, defaultValue);
}
public void wipe(String domain, String key) {
SharedPreferences.Editor editor = mContext.getSharedPreferences(getName(mContext, domain),
Context.MODE_PRIVATE).edit();
editor.remove(key);
editor.apply();
}
public void wipe(String key) {
wipe(null, key);
}
}
| Delete PersistenceHelper [will be replaced by a more robust implementation]
| lib_appcommons/src/main/java/ng/kingsley/android/helper/PersistenceHelper.java | Delete PersistenceHelper [will be replaced by a more robust implementation] | <ide><path>ib_appcommons/src/main/java/ng/kingsley/android/helper/PersistenceHelper.java
<del>package ng.kingsley.android.helper;
<del>
<del>import android.app.Application;
<del>import android.content.Context;
<del>import android.content.SharedPreferences;
<del>import android.text.TextUtils;
<del>
<del>import com.google.gson.Gson;
<del>import com.google.gson.reflect.TypeToken;
<del>
<del>import java.lang.reflect.Type;
<del>
<del>import javax.inject.Inject;
<del>import javax.inject.Singleton;
<del>
<del>/**
<del> * @author ADIO Kingsley O.
<del> * @since 13 Aug, 2015
<del> */
<del>@Singleton
<del>public class PersistenceHelper {
<del>
<del> private final Context mContext;
<del> private final Gson GSON;
<del>
<del> @Inject
<del> public PersistenceHelper(Application context, Gson gson) {
<del> mContext = context;
<del> GSON = gson;
<del> }
<del>
<del> private static String getName(Context context, String domain) {
<del> if (TextUtils.isEmpty(domain)) {
<del> return context.getPackageName();
<del> }
<del> return context.getPackageName() + "_" + domain;
<del> }
<del>
<del> public <T> void persist(String domain, String key, T object) {
<del> SharedPreferences.Editor editor = mContext.getSharedPreferences(getName(mContext, domain),
<del> Context.MODE_PRIVATE).edit();
<del> String string = GSON.toJson(object, new TypeToken<T>() {
<del> }.getType());
<del> editor.putString(key, string);
<del> editor.apply();
<del> }
<del>
<del> public <T> void persist(String key, T object) {
<del> persist(null, key, object);
<del> }
<del>
<del> public <T> T retrieve(String domain, String key, Class<T> clas) {
<del> SharedPreferences pref = mContext.getSharedPreferences(getName(mContext, domain), Context.MODE_PRIVATE);
<del> String object = pref.getString(key, null);
<del> return GSON.fromJson(object, clas);
<del> }
<del>
<del> public <T> T retrieve(String key, Class<T> clas) {
<del> return retrieve(null, key, clas);
<del> }
<del>
<del> public <T> T retrieve(String domain, String key, Class<T> clas, T defaultValue) {
<del> T object = retrieve(domain, key, clas);
<del> return (object == null) ? defaultValue : object;
<del> }
<del>
<del> public <T> T retrieve(String key, Class<T> clas, T defaultValue) {
<del> return retrieve(null, key, clas, defaultValue);
<del> }
<del>
<del> public <T> T retrieve(String domain, String key, Type type) {
<del> SharedPreferences pref = mContext.getSharedPreferences(getName(mContext, domain), Context.MODE_PRIVATE);
<del> String object = pref.getString(key, null);
<del> return GSON.fromJson(object, type);
<del> }
<del>
<del> public <T> T retrieve(String key, Type type) {
<del> return retrieve(null, key, type);
<del> }
<del>
<del> public <T> T retrieve(String domain, String key, Type type, T defaultValue) {
<del> T object = retrieve(domain, key, type);
<del> return (object == null) ? defaultValue : object;
<del> }
<del>
<del> public <T> T retrieve(String key, Type type, T defaultValue) {
<del> return retrieve(null, key, type, defaultValue);
<del> }
<del>
<del>
<del> public void wipe(String domain, String key) {
<del> SharedPreferences.Editor editor = mContext.getSharedPreferences(getName(mContext, domain),
<del> Context.MODE_PRIVATE).edit();
<del> editor.remove(key);
<del> editor.apply();
<del> }
<del>
<del> public void wipe(String key) {
<del> wipe(null, key);
<del> }
<del>} |
||
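
The PersistenceHelper removed above serialises objects into SharedPreferences through Gson, and its retrieve(String, Type) overloads exist because generic parameters are erased at runtime. A small self-contained sketch of that Gson round-trip follows; it is illustrative only, and the Item class and JSON are invented for the example rather than taken from the library.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.List;

// Illustrative only: shows why a java.lang.reflect.Type is needed to round-trip
// a generic collection with Gson, as in the Type-based retrieve overloads above.
public class GsonTypeTokenDemo {
    static class Item {
        String name;
        Item(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        Gson gson = new Gson();
        List<Item> items = Arrays.asList(new Item("a"), new Item("b"));

        String json = gson.toJson(items);

        // Without the TypeToken, Gson would deserialise the elements as LinkedTreeMap,
        // because List<Item> erases to a plain List at runtime.
        Type listOfItems = new TypeToken<List<Item>>() {}.getType();
        List<Item> restored = gson.fromJson(json, listOfItems);

        System.out.println(restored.get(0).name); // prints "a"
    }
}
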
Java | mit | 011aaa9b2f7a139ad1561ac5340f21c2ba100b62 | 0 | skcodeworks/MissionInterview | package org.santhoshkumar.DynamicProgramming;
/**
* Created by sk010962 on 10/14/2015.
*/
public class RodCuttingMaxProfit {
int[] cost;
//Top down DP solution
public int solve(){
int[][] matrix = new int[cost.length+1][cost.length+1];
for(int i= 0; i <= cost.length; i++){
matrix[i][0] = 0;
matrix[0][i] = 0;
}
//Fill the top row with cost for respective size length cut
for(int i= 1; i <= cost.length; i++){
matrix[0][i] = cost[i-1];
}
for(int i = 1; i <= cost.length; i++){
for(int j = 1; j <= cost.length; j++){
if(i >= j) {
matrix[i][j] = Math.max(matrix[i-1][j], matrix[i][j-i]+cost[i-1]);
}else{
matrix[i][j]= matrix[i-1][j];
}
}
}
return matrix[cost.length][cost.length];
}
// Top down without recursion
public int solveMax(){
int[] sol = new int[cost.length+1];
sol[0] = -1;
for(int i = 1; i <= cost.length; i++){
for( int j = i ; j <= cost.length; j++){
sol[j] = Math.max(sol[j], sol[j-i] + cost[i-1]);
}
}
return sol[cost.length];
}
// Top down using recursion
public int solveMaxRecur(int length){
if(length <= 0){
return 0;
}
int max = 0;
for(int i=0; i < length; i++){
max = Math.max(max, cost[i] + solveMaxRecur(length - i - 1));
}
return max;
}
}
| src/org/santhoshkumar/DynamicProgramming/RodCuttingMaxProfit.java | package org.santhoshkumar.DynamicProgramming;
/**
* Created by sk010962 on 10/14/2015.
*/
public class RodCuttingMaxProfit {
int[] cost;
//Top down DP solution
public int solve(){
int[][] matrix = new int[cost.length+1][cost.length+1];
for(int i= 0; i <= cost.length; i++){
matrix[i][0] = 0;
matrix[0][i] = 0;
}
//Fill the top row with cost for respective size length cut
for(int i= 1; i <= cost.length; i++){
matrix[0][i] = cost[i-1];
}
for(int i = 1; i <= cost.length; i++){
for(int j = 1; j <= cost.length; j++){
if(i >= j) {
matrix[i][j] = Math.max(matrix[i-1][j], matrix[i][j-i]+cost[i-1]);
}else{
matrix[i][j]= matrix[i-1][j];
}
}
}
return matrix[cost.length][cost.length];
}
// Top down without recursion
public int solveMax(){
return -1;
}
// Top down using recursion
public int solveMaxRecursive(){
return -1;
}
}
| rod cutting problem
| src/org/santhoshkumar/DynamicProgramming/RodCuttingMaxProfit.java | rod cutting problem | <ide><path>rc/org/santhoshkumar/DynamicProgramming/RodCuttingMaxProfit.java
<ide>
<ide> // Top down without recursion
<ide> public int solveMax(){
<del> return -1;
<add> int[] sol = new int[cost.length+1];
<add> sol[0] = -1;
<add>
<add> for(int i = 1; i <= cost.length; i++){
<add> for( int j = i ; j <= cost.length; j++){
<add> sol[j] = Math.max(sol[j], sol[j-i] + cost[i-1]);
<add> }
<add> }
<add> return sol[cost.length];
<ide> }
<ide>
<ide> // Top down using recursion
<del> public int solveMaxRecursive(){
<del> return -1;
<add> public int solveMaxRecur(int length){
<add> if(length <= 0){
<add> return 0;
<add> }
<add> int max = 0;
<add> for(int i=0; i < length; i++){
<add> max = Math.max(max, cost[i] + solveMaxRecur(length - i - 1));
<add> }
<add> return max;
<ide> }
<ide> } |
|
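
The rod-cutting record above approaches the same maximisation three ways. For reference, here is a compact stand-alone version of the bottom-up recurrence best[j] = max(best[j], best[j - i] + price[i - 1]) with a worked check; it is illustrative only, and the price table is the usual textbook example rather than data from that repository.

// Illustrative reference for the rod-cutting DP in the record above: best[j] is the
// maximum revenue obtainable from a rod of length j, built up piece length by piece length.
public class RodCuttingSketch {
    static int maxRevenue(int[] price) {
        int n = price.length;
        int[] best = new int[n + 1]; // best[0] = 0: an empty rod earns nothing
        for (int i = 1; i <= n; i++) {          // piece length i, priced price[i - 1]
            for (int j = i; j <= n; j++) {      // only rods long enough to fit the piece
                best[j] = Math.max(best[j], best[j - i] + price[i - 1]);
            }
        }
        return best[n];
    }

    public static void main(String[] args) {
        int[] price = {1, 5, 8, 9, 10, 17, 17, 20}; // prices for lengths 1..8
        // Optimal revenue is 22: cut the length-8 rod into 2 + 6 (5 + 17).
        System.out.println(maxRevenue(price)); // 22
    }
}
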
Java | mit | error: pathspec 'src/com/jbotelho/pyirc2/client/PushSubscription.java' did not match any file(s) known to git
| d0b68b3185009afbde66aa2872d96c59f2e71fbd | 1 | uniite/pyirc2_client | package com.jbotelho.pyirc2.client;
public class PushSubscription {
int id;
String dataType;
NumberOrText[] target;
}
| src/com/jbotelho/pyirc2/client/PushSubscription.java | Added PushSubscription
| src/com/jbotelho/pyirc2/client/PushSubscription.java | Added PushSubscription | <ide><path>rc/com/jbotelho/pyirc2/client/PushSubscription.java
<add>package com.jbotelho.pyirc2.client;
<add>
<add>
<add>public class PushSubscription {
<add> int id;
<add> String dataType;
<add> NumberOrText[] target;
<add>} |
|
Java | mit | error: pathspec 'src/main/java/svc/models/EmploymentEducation.java' did not match any file(s) known to git
| cebe269f1afa201bdac3992920d9952764d6407a | 1 | gh6-team/less-homelessness-api | package svc.models;
import java.util.Date;
public class EmploymentEducation {
public int id;
public int project_entry_id;
public int personal_id;
public Date information_date;
public Integer last_grade_completed;
public Integer school_status;
public Integer employed_status;
public Integer employment_type;
public Integer not_employed_reason;
public int data_collection_stage;
public Date date_created;
public Date date_updated;
public int user_id;
public Date date_deleted;
public int export_id;
}
| src/main/java/svc/models/EmploymentEducation.java | Added EmploymentEducation
| src/main/java/svc/models/EmploymentEducation.java | Added EmploymentEducation | <ide><path>rc/main/java/svc/models/EmploymentEducation.java
<add>package svc.models;
<add>
<add>import java.util.Date;
<add>
<add>public class EmploymentEducation {
<add> public int id;
<add> public int project_entry_id;
<add> public int personal_id;
<add> public Date information_date;
<add> public Integer last_grade_completed;
<add> public Integer school_status;
<add> public Integer employed_status;
<add> public Integer employment_type;
<add> public Integer not_employed_reason;
<add> public int data_collection_stage;
<add> public Date date_created;
<add> public Date date_updated;
<add> public int user_id;
<add> public Date date_deleted;
<add> public int export_id;
<add>} |
|
Java | apache-2.0 | 6f67b3c3fd31d58f8bb84f7315a7328cc77810c8 | 0 | maxmind/minfraud-api-java,maxmind/minfraud-api-java | package com.maxmind.minfraud.response;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* This class contains minFraud response data related to the credit card.
*/
public final class CreditCard {
protected Issuer issuer = new Issuer();
protected String country;
@JsonProperty("is_issued_in_billing_address_country")
protected Boolean isIssuedInBillingAddressCountry;
@JsonProperty("is_prepaid")
protected Boolean isPrepaid;
/**
* @return The {@code Issuer} model object.
*/
public final Issuer getIssuer() {
return issuer;
}
/**
* @return The two letter <a href="http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2">
* ISO 3166-1 alpha-2</a> country code associated with the location
* of the majority of customers using this credit card as determined
* by their billing address. In cases where the location of customers
* is highly mixed, this defaults to the country of the bank issuing
* the card.
*/
public final String getCountry() {
return country;
}
/**
* @return True if the country of the billing address matches the country
* of the majority of customers using that IIN. In cases where the
* location of customers is highly mixed, the match is to the country of
* the bank issuing the card.
*/
@JsonIgnore
public final Boolean isIssuedInBillingAddressCountry() {
return isIssuedInBillingAddressCountry;
}
/**
* @return True if the card is a prepaid card. False if not prepaid. If
* the IIN was not provided or is unknown, null will be returned.
*/
@JsonIgnore
public final Boolean isPrepaid() {
return isPrepaid;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("CreditCard{");
sb.append("issuer=").append(this.issuer);
sb.append(", country='").append(this.country).append('\'');
sb.append(", isIssuedInBillingAddressCountry=").append(this.isIssuedInBillingAddressCountry);
sb.append(", isPrepaid=").append(this.isPrepaid);
sb.append('}');
return sb.toString();
}
}
| src/main/java/com/maxmind/minfraud/response/CreditCard.java | package com.maxmind.minfraud.response;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* This class contains minFraud response data related to the credit card.
*/
public final class CreditCard {
protected Issuer issuer = new Issuer();
protected String country;
@JsonProperty("is_issued_in_billing_address_country")
protected Boolean isIssuedInBillingAddressCountry;
@JsonProperty("is_prepaid")
protected Boolean isPrepaid;
/**
* @return The {@code Issuer} model object.
*/
public final Issuer getIssuer() {
return issuer;
}
/**
* @return This field contains an ISO 3166-1 alpha-2 country code
* representing the country that the card was issued in. This will be null
* if there is no value in the response.
*/
public final String getCountry() {
return country;
}
/**
* @return This will return true if the country of the billing address
* matches the country that the credit card was issued in. It will return
* false if they do not match. If the billing country was not provided or
* the issuer country could not be determined, null will be returned.
*/
@JsonIgnore
public final Boolean isIssuedInBillingAddressCountry() {
return isIssuedInBillingAddressCountry;
}
/**
* @return True if the card is a prepaid card. False if not prepaid. If
* the IIN was not provided or is unknown, null will be returned.
*/
@JsonIgnore
public final Boolean isPrepaid() {
return isPrepaid;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("CreditCard{");
sb.append("issuer=").append(this.issuer);
sb.append(", country='").append(this.country).append('\'');
sb.append(", isIssuedInBillingAddressCountry=").append(this.isIssuedInBillingAddressCountry);
sb.append(", isPrepaid=").append(this.isPrepaid);
sb.append('}');
return sb.toString();
}
}
| Update credit card property descriptions to be more accurate
| src/main/java/com/maxmind/minfraud/response/CreditCard.java | Update credit card property descriptions to be more accurate | <ide><path>rc/main/java/com/maxmind/minfraud/response/CreditCard.java
<ide> }
<ide>
<ide> /**
<del> * @return This field contains an ISO 3166-1 alpha-2 country code
<del> * representing the country that the card was issued in. This will be null
<del> * if there is no value in the response.
<add> * @return The two letter <a href="http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2">
<add> * ISO 3166-1 alpha-2</a> country code associated with the location
<add> * of the majority of customers using this credit card as determined
<add> * by their billing address. In cases where the location of customers
<add> * is highly mixed, this defaults to the country of the bank issuing
<add> * the card.
<ide> */
<ide> public final String getCountry() {
<ide> return country;
<ide> }
<ide>
<ide> /**
<del> * @return This will return true if the country of the billing address
<del> * matches the country that the credit card was issued in. It will return
<del> * false if they do not match. If the billing country was not provided or
<del> * the issuer country could not be determined, null will be returned.
<add> * @return True if the country of the billing address matches the country
<add> * of the majority of customers using that IIN. In cases where the
<add> * location of customers is highly mixed, the match is to the country of
<add> * the bank issuing the card.
<ide> */
<ide> @JsonIgnore
<ide> public final Boolean isIssuedInBillingAddressCountry() { |
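
The CreditCard model above relies on Jackson's @JsonProperty to map the web service's snake_case keys onto Java fields. A self-contained sketch of that mapping pattern follows; it is illustrative only: this toy Card class and JSON are invented for the example and are not the minFraud API, whose ObjectMapper is configured inside the client library.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

// Illustrative only: a toy model showing the @JsonProperty snake_case mapping style
// used by the CreditCard response class above. Not the real minFraud client code.
public class JsonPropertyDemo {
    public static class Card {
        @JsonProperty("country")
        public String country;

        @JsonProperty("is_prepaid")
        public Boolean isPrepaid;

        @JsonProperty("is_issued_in_billing_address_country")
        public Boolean isIssuedInBillingAddressCountry;
    }

    public static void main(String[] args) throws Exception {
        String json = "{\"country\":\"US\",\"is_prepaid\":true,"
                + "\"is_issued_in_billing_address_country\":false}";

        Card card = new ObjectMapper().readValue(json, Card.class);
        System.out.println(card.country);                         // US
        System.out.println(card.isPrepaid);                       // true
        System.out.println(card.isIssuedInBillingAddressCountry); // false
    }
}
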
|
Java | apache-2.0 | d6dfd75c57037c07c37a35e626168e25eb18f4ac | 0 | treason258/TreCore,treason258/TreCore | package com.mjiayou.trecorelib.base;
/*
_ooOoo_
o8888888o
88" . "88
(| -_- |)
O\ = /O
____/`---'\____
.' \\| |// `.
/ \\||| : |||// \
/ _||||| -:- |||||- \
| | \\\ - /// | |
| \_| ''\---/'' | |
\ .-\__ `-` ___/-. /
___`. .' /--.--\ `. . __
."" '< `.___\_<|>_/___.' >'"".
| | : `- \`.;`\ _ /`;.`/ - ` : | |
\ \ `-. \_ __\ /__ _/ .-` / /
======`-.____`-.___\_____/___.-`____.-'======
`=---='
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
                Buddha bless us: no bugs, ever
*/
import android.app.Application;
import android.content.Context;
import android.content.res.Configuration;
import com.mjiayou.trecorelib.common.Caches;
import com.mjiayou.trecorelib.helper.GsonHelper;
import com.mjiayou.trecorelib.helper.VolleyHelper;
import com.mjiayou.trecorelib.util.LogUtils;
import com.mjiayou.trecorelib.util.ProcessUtil;
import com.mjiayou.trecorelib.util.VersionUtil;
/**
* TCApp
*/
public class TCApp extends Application {
// TAG
protected static final String TAG = TCApp.class.getSimpleName();
// var
private static Application mInstance;
private Context mContext;
/**
* 获取Application对象
*/
public static Application get() {
return mInstance;
}
/**
* onCreateManual
*/
public static void onCreateManual(Application application) {
mInstance = application;
TCApp tcApp = new TCApp();
tcApp.mContext = application.getApplicationContext();
tcApp.initApp();
}
@Override
public void onCreate() {
LogUtils.printLifeRecycle(TAG, "onCreate");
super.onCreate();
// var
mInstance = this;
mContext = getApplicationContext();
LogUtils.printLifeRecycle(TAG, "process id -> " + ProcessUtil.getProcessId());
LogUtils.printLifeRecycle(TAG, "process name -> " + ProcessUtil.getProcessName(mContext));
initApp();
}
@Override
public void onTerminate() {
LogUtils.printLifeRecycle(TAG, "onTerminate");
super.onTerminate();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
LogUtils.printLifeRecycle(TAG, "onConfigurationChanged | newConfig -> " + newConfig.toString());
super.onConfigurationChanged(newConfig);
}
@Override
public void onLowMemory() {
LogUtils.printLifeRecycle(TAG, "onLowMemory");
super.onLowMemory();
}
@Override
public void onTrimMemory(int level) {
LogUtils.printLifeRecycle(TAG, "onTrimMemory | level -> " + level);
super.onTrimMemory(level);
}
/**
* 初始化APP
*/
public void initApp() {
LogUtils.printLifeRecycle(TAG, "initApp");
LogUtils.printInit(TAG);
/**
* 初始化 配置信息
*/
Caches.get().init();
VersionUtil.init();
/**
* 初始化 第三方库
*/
VolleyHelper.init();
GsonHelper.init();
// SwissArmyKnifeUtil.init();
}
// ******************************** project ********************************
}
| TreCoreLib/src/main/java/com/mjiayou/trecorelib/base/TCApp.java | package com.mjiayou.trecorelib.base;
/*
_ooOoo_
o8888888o
88" . "88
(| -_- |)
O\ = /O
____/`---'\____
.' \\| |// `.
/ \\||| : |||// \
/ _||||| -:- |||||- \
| | \\\ - /// | |
| \_| ''\---/'' | |
\ .-\__ `-` ___/-. /
___`. .' /--.--\ `. . __
."" '< `.___\_<|>_/___.' >'"".
| | : `- \`.;`\ _ /`;.`/ - ` : | |
\ \ `-. \_ __\ /__ _/ .-` / /
======`-.____`-.___\_____/___.-`____.-'======
`=---='
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
佛祖保佑 永无BUG
*/
import android.app.Application;
import android.content.Context;
import android.content.res.Configuration;
import com.mjiayou.trecorelib.common.Caches;
import com.mjiayou.trecorelib.helper.GsonHelper;
import com.mjiayou.trecorelib.helper.VolleyHelper;
import com.mjiayou.trecorelib.util.LogUtils;
import com.mjiayou.trecorelib.util.ProcessUtil;
import com.mjiayou.trecorelib.util.VersionUtil;
/**
* TCApp
*/
public class TCApp extends Application {
// TAG
protected static final String TAG = TCApp.class.getSimpleName();
// var
private static TCApp mInstance;
private Context mContext;
/**
* 获取Application对象
*/
public static TCApp get() {
return mInstance;
}
@Override
public void onCreate() {
LogUtils.printLifeRecycle(TAG, "onCreate");
super.onCreate();
// var
mInstance = this;
mContext = getApplicationContext();
LogUtils.printLifeRecycle(TAG, "process id -> " + ProcessUtil.getProcessId());
LogUtils.printLifeRecycle(TAG, "process name -> " + ProcessUtil.getProcessName(mContext));
initApp();
}
@Override
public void onTerminate() {
LogUtils.printLifeRecycle(TAG, "onTerminate");
super.onTerminate();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
LogUtils.printLifeRecycle(TAG, "onConfigurationChanged | newConfig -> " + newConfig.toString());
super.onConfigurationChanged(newConfig);
}
@Override
public void onLowMemory() {
LogUtils.printLifeRecycle(TAG, "onLowMemory");
super.onLowMemory();
}
@Override
public void onTrimMemory(int level) {
LogUtils.printLifeRecycle(TAG, "onTrimMemory | level -> " + level);
super.onTrimMemory(level);
}
/**
* 初始化APP
*/
public void initApp() {
LogUtils.printLifeRecycle(TAG, "initApp");
LogUtils.printInit(TAG);
/**
* 初始化 配置信息
*/
Caches.get().init();
VersionUtil.init();
/**
* 初始化 第三方库
*/
VolleyHelper.init();
GsonHelper.init();
// SwissArmyKnifeUtil.init();
}
// ******************************** project ********************************
}
| Allow using TreCoreLib without having to extend TCApp
| TreCoreLib/src/main/java/com/mjiayou/trecorelib/base/TCApp.java | Allow using TreCoreLib without having to extend TCApp | <ide><path>reCoreLib/src/main/java/com/mjiayou/trecorelib/base/TCApp.java
<ide> protected static final String TAG = TCApp.class.getSimpleName();
<ide>
<ide> // var
<del> private static TCApp mInstance;
<add> private static Application mInstance;
<ide> private Context mContext;
<ide>
<ide> /**
<ide> * 获取Application对象
<ide> */
<del> public static TCApp get() {
<add> public static Application get() {
<ide> return mInstance;
<add> }
<add>
<add> /**
<add> * onCreateManual
<add> */
<add> public static void onCreateManual(Application application) {
<add> mInstance = application;
<add> TCApp tcApp = new TCApp();
<add> tcApp.mContext = application.getApplicationContext();
<add> tcApp.initApp();
<ide> }
<ide>
<ide> @Override |
|
Java | mit | b89ef83e90577eb3b89aa32628fcc54dac81b134 | 0 | madumlao/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,GluuFederation/oxAuth,madumlao/oxAuth,madumlao/oxAuth,madumlao/oxAuth,madumlao/oxAuth | /*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.uma.ws.rs;
import com.wordnik.swagger.annotations.Api;
import org.gluu.site.ldap.persistence.LdapEntryManager;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.log.Log;
import org.xdi.oxauth.model.common.AuthorizationGrant;
import org.xdi.oxauth.model.common.uma.UmaRPT;
import org.xdi.oxauth.model.config.ConfigurationFactory;
import org.xdi.oxauth.model.error.ErrorResponseFactory;
import org.xdi.oxauth.model.federation.FederationTrust;
import org.xdi.oxauth.model.federation.FederationTrustStatus;
import org.xdi.oxauth.model.registration.Client;
import org.xdi.oxauth.model.uma.RptAuthorizationRequest;
import org.xdi.oxauth.model.uma.RptAuthorizationResponse;
import org.xdi.oxauth.model.uma.UmaConstants;
import org.xdi.oxauth.model.uma.UmaErrorResponseType;
import org.xdi.oxauth.model.uma.persistence.ResourceSetPermission;
import org.xdi.oxauth.model.util.Util;
import org.xdi.oxauth.service.ClientService;
import org.xdi.oxauth.service.FederationDataService;
import org.xdi.oxauth.service.uma.RPTManager;
import org.xdi.oxauth.service.uma.ResourceSetPermissionManager;
import org.xdi.oxauth.service.uma.UmaValidationService;
import org.xdi.oxauth.service.uma.authorization.AuthorizationService;
import org.xdi.oxauth.util.ServerUtil;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.util.Date;
import java.util.List;
/**
* The endpoint at which the requester asks for authorization to have a new permission.
*/
@Path("/requester/perm")
@Api(value = "/requester/perm", description = "RPT authorization endpoint. RPT is authorized with new permission(s).")
@Name("rptPermissionAuthorizationRestWebService")
public class RptPermissionAuthorizationWS {
@Logger
private Log log;
@In
private ErrorResponseFactory errorResponseFactory;
@In
private RPTManager rptManager;
@In
private ResourceSetPermissionManager resourceSetPermissionManager;
@In
private UmaValidationService umaValidationService;
@In
private AuthorizationService umaAuthorizationService;
@In
private FederationDataService federationDataService;
@In
private ClientService clientService;
@In
private LdapEntryManager ldapEntryManager;
@POST
@Consumes({UmaConstants.JSON_MEDIA_TYPE})
@Produces({UmaConstants.JSON_MEDIA_TYPE})
public Response requestRptPermissionAuthorization(
@HeaderParam("Authorization") String authorization,
@HeaderParam("Host") String amHost,
RptAuthorizationRequest rptAuthorizationRequest,
@Context HttpServletRequest httpRequest) {
try {
final AuthorizationGrant grant = umaValidationService.assertHasAuthorizationScope(authorization);
final String validatedAmHost = umaValidationService.validateAmHost(amHost);
final UmaRPT rpt = authorizeRptPermission(authorization, rptAuthorizationRequest, httpRequest, grant, validatedAmHost);
// convert manually to avoid possible conflict between resteasy providers, e.g. jettison, jackson
return Response.ok(ServerUtil.asJson(new RptAuthorizationResponse(rpt.getCode()))).build();
} catch (Exception ex) {
log.error("Exception happened", ex);
if (ex instanceof WebApplicationException) {
throw (WebApplicationException) ex;
}
throw new WebApplicationException(Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity(errorResponseFactory.getUmaJsonErrorResponse(UmaErrorResponseType.SERVER_ERROR)).build());
}
}
private UmaRPT authorizeRptPermission(String authorization,
RptAuthorizationRequest rptAuthorizationRequest,
HttpServletRequest httpRequest,
AuthorizationGrant grant,
String amHost) {
UmaRPT rpt;
if (Util.isNullOrEmpty(rptAuthorizationRequest.getRpt())) {
rpt = rptManager.createRPT(authorization, amHost, false);
} else {
rpt = rptManager.getRPTByCode(rptAuthorizationRequest.getRpt());
}
// Validate RPT
try {
umaValidationService.validateRPT(rpt);
} catch (WebApplicationException e) {
// according to latest UMA spec ( dated 2015-02-23 https://docs.kantarainitiative.org/uma/draft-uma-core.html)
            // it's up to the implementation whether to create a new RPT for each request or pass back the request's RPT.
            // Here we decided to pass back a new RPT if the request's RPT is invalid.
rpt = rptManager.getRPTByCode(rptAuthorizationRequest.getRpt());
}
final ResourceSetPermission resourceSetPermission = resourceSetPermissionManager.getResourceSetPermissionByTicket(rptAuthorizationRequest.getTicket());
// Validate resource set permission
umaValidationService.validateResourceSetPermission(resourceSetPermission);
final Boolean federationEnabled = ConfigurationFactory.instance().getConfiguration().getFederationEnabled();
if (federationEnabled != null && federationEnabled) {
final Client client = clientService.getClient(rpt.getClientId());
final List<FederationTrust> trustList = federationDataService.getTrustByClient(client, FederationTrustStatus.ACTIVE);
if (trustList != null && !trustList.isEmpty()) {
for (FederationTrust t : trustList) {
final Boolean skipAuthorization = t.getSkipAuthorization();
if (skipAuthorization != null && skipAuthorization) {
// grant access directly, client is in trust and skipAuthorization=true
log.trace("grant access directly, client is in trust and skipAuthorization=true");
rptManager.addPermissionToRPT(rpt, resourceSetPermission);
invalidateTicket(resourceSetPermission);
return rpt;
}
}
} else {
log.trace("Forbid RPT authorization - client is not in any trust however federation is enabled on server.");
// throw not authorized exception
throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN)
.entity(errorResponseFactory.getUmaJsonErrorResponse(UmaErrorResponseType.NOT_AUTHORIZED_PERMISSION)).build());
}
}
// Add permission to RPT
if (umaAuthorizationService.allowToAddPermission(grant, rpt, resourceSetPermission, httpRequest, rptAuthorizationRequest.getClaims())) {
rptManager.addPermissionToRPT(rpt, resourceSetPermission);
invalidateTicket(resourceSetPermission);
return rpt;
}
// throw not authorized exception
throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN)
.entity(errorResponseFactory.getUmaJsonErrorResponse(UmaErrorResponseType.NOT_AUTHORIZED_PERMISSION)).build());
}
private void invalidateTicket(ResourceSetPermission resourceSetPermission) {
try {
resourceSetPermission.setExpirationDate(new Date(0)); // invalidate ticket and persist
ldapEntryManager.merge(resourceSetPermission);
} catch (Exception e) {
log.error("Failed to invalidate ticket: " + resourceSetPermission.getTicket() + ". " + e.getMessage(), e);
}
}
}
| Server/src/main/java/org/xdi/oxauth/uma/ws/rs/RptPermissionAuthorizationWS.java | /*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.xdi.oxauth.uma.ws.rs;
import org.xdi.oxauth.model.util.Util;
import com.wordnik.swagger.annotations.Api;
import org.gluu.site.ldap.persistence.LdapEntryManager;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.log.Log;
import org.xdi.oxauth.model.common.AuthorizationGrant;
import org.xdi.oxauth.model.common.uma.UmaRPT;
import org.xdi.oxauth.model.config.ConfigurationFactory;
import org.xdi.oxauth.model.error.ErrorResponseFactory;
import org.xdi.oxauth.model.federation.FederationTrust;
import org.xdi.oxauth.model.federation.FederationTrustStatus;
import org.xdi.oxauth.model.registration.Client;
import org.xdi.oxauth.model.uma.RptAuthorizationRequest;
import org.xdi.oxauth.model.uma.RptAuthorizationResponse;
import org.xdi.oxauth.model.uma.UmaConstants;
import org.xdi.oxauth.model.uma.UmaErrorResponseType;
import org.xdi.oxauth.model.uma.persistence.ResourceSetPermission;
import org.xdi.oxauth.service.ClientService;
import org.xdi.oxauth.service.FederationDataService;
import org.xdi.oxauth.service.uma.RPTManager;
import org.xdi.oxauth.service.uma.ResourceSetPermissionManager;
import org.xdi.oxauth.service.uma.UmaValidationService;
import org.xdi.oxauth.service.uma.authorization.AuthorizationService;
import org.xdi.oxauth.util.ServerUtil;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.UUID;
/**
* The endpoint at which the requester asks for authorization to have a new permission.
*/
@Path("/requester/perm")
@Api(value = "/requester/perm", description = "RPT authorization endpoint. RPT is authorized with new permission(s).")
@Name("rptPermissionAuthorizationRestWebService")
public class RptPermissionAuthorizationWS {
@Logger
private Log log;
@In
private ErrorResponseFactory errorResponseFactory;
@In
private RPTManager rptManager;
@In
private ResourceSetPermissionManager resourceSetPermissionManager;
@In
private UmaValidationService umaValidationService;
@In
private AuthorizationService umaAuthorizationService;
@In
private FederationDataService federationDataService;
@In
private ClientService clientService;
@In
private LdapEntryManager ldapEntryManager;
@POST
@Consumes({UmaConstants.JSON_MEDIA_TYPE})
@Produces({UmaConstants.JSON_MEDIA_TYPE})
public Response requestRptPermissionAuthorization(
@HeaderParam("Authorization") String authorization,
@HeaderParam("Host") String amHost,
RptAuthorizationRequest rptAuthorizationRequest,
@Context HttpServletRequest httpRequest) {
try {
final AuthorizationGrant grant = umaValidationService.assertHasAuthorizationScope(authorization);
final String validatedAmHost = umaValidationService.validateAmHost(amHost);
final UmaRPT rpt = authorizeRptPermission(authorization, rptAuthorizationRequest, httpRequest, grant, validatedAmHost);
// convert manually to avoid possible conflict between resteasy providers, e.g. jettison, jackson
return Response.ok(ServerUtil.asJson(new RptAuthorizationResponse(rpt.getCode()))).build();
} catch (Exception ex) {
log.error("Exception happened", ex);
if (ex instanceof WebApplicationException) {
throw (WebApplicationException) ex;
}
throw new WebApplicationException(Response.status(Response.Status.INTERNAL_SERVER_ERROR)
.entity(errorResponseFactory.getUmaJsonErrorResponse(UmaErrorResponseType.SERVER_ERROR)).build());
}
}
private UmaRPT authorizeRptPermission(String authorization,
RptAuthorizationRequest rptAuthorizationRequest,
HttpServletRequest httpRequest,
AuthorizationGrant grant,
String amHost) {
UmaRPT rpt;
if (Util.isNullOrEmpty(rptAuthorizationRequest.getRpt())) {
rpt = rptManager.createRPT(authorization, amHost, false);
} else {
rpt = rptManager.getRPTByCode(rptAuthorizationRequest.getRpt());
}
// Validate RPT
try {
umaValidationService.validateRPT(rpt);
} catch (WebApplicationException e) {
// according to latest UMA spec ( dated 2015-02-23 https://docs.kantarainitiative.org/uma/draft-uma-core.html)
            // it's up to the implementation whether to create a new RPT for each request or pass back the request's RPT.
            // Here we decided to pass back a new RPT if the request's RPT is invalid.
rpt = rptManager.getRPTByCode(rptAuthorizationRequest.getRpt());
}
final ResourceSetPermission resourceSetPermission = resourceSetPermissionManager.getResourceSetPermissionByTicket(rptAuthorizationRequest.getTicket());
// Validate resource set permission
umaValidationService.validateResourceSetPermission(resourceSetPermission);
final Boolean federationEnabled = ConfigurationFactory.instance().getConfiguration().getFederationEnabled();
if (federationEnabled != null && federationEnabled) {
final Client client = clientService.getClient(rpt.getClientId());
final List<FederationTrust> trustList = federationDataService.getTrustByClient(client, FederationTrustStatus.ACTIVE);
if (trustList != null && !trustList.isEmpty()) {
for (FederationTrust t : trustList) {
final Boolean skipAuthorization = t.getSkipAuthorization();
if (skipAuthorization != null && skipAuthorization) {
// grant access directly, client is in trust and skipAuthorization=true
log.trace("grant access directly, client is in trust and skipAuthorization=true");
rptManager.addPermissionToRPT(rpt, resourceSetPermission);
invalidateTicket(resourceSetPermission);
return rpt;
}
}
} else {
log.trace("Forbid RPT authorization - client is not in any trust however federation is enabled on server.");
// throw not authorized exception
throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN)
.entity(errorResponseFactory.getUmaJsonErrorResponse(UmaErrorResponseType.NOT_AUTHORIZED_PERMISSION)).build());
}
}
// Add permission to RPT
if (umaAuthorizationService.allowToAddPermission(grant, rpt, resourceSetPermission, httpRequest, rptAuthorizationRequest.getClaims())) {
rptManager.addPermissionToRPT(rpt, resourceSetPermission);
invalidateTicket(resourceSetPermission);
return rpt;
}
// throw not authorized exception
throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN)
.entity(errorResponseFactory.getUmaJsonErrorResponse(UmaErrorResponseType.NOT_AUTHORIZED_PERMISSION)).build());
}
private void invalidateTicket(ResourceSetPermission resourceSetPermission) {
try {
resourceSetPermission.setTicket(UUID.randomUUID().toString()); // invalidate ticket and persist
ldapEntryManager.merge(resourceSetPermission);
} catch (Exception e) {
log.error("Failed to invalidate ticket: " + resourceSetPermission.getTicket() + ". " + e.getMessage(), e);
}
}
}
| fixed uma ticket invalidation
| Server/src/main/java/org/xdi/oxauth/uma/ws/rs/RptPermissionAuthorizationWS.java | fixed uma ticket invalidation | <ide><path>erver/src/main/java/org/xdi/oxauth/uma/ws/rs/RptPermissionAuthorizationWS.java
<ide>
<ide> package org.xdi.oxauth.uma.ws.rs;
<ide>
<del>import org.xdi.oxauth.model.util.Util;
<ide> import com.wordnik.swagger.annotations.Api;
<ide> import org.gluu.site.ldap.persistence.LdapEntryManager;
<ide> import org.jboss.seam.annotations.In;
<ide> import org.xdi.oxauth.model.uma.UmaConstants;
<ide> import org.xdi.oxauth.model.uma.UmaErrorResponseType;
<ide> import org.xdi.oxauth.model.uma.persistence.ResourceSetPermission;
<add>import org.xdi.oxauth.model.util.Util;
<ide> import org.xdi.oxauth.service.ClientService;
<ide> import org.xdi.oxauth.service.FederationDataService;
<ide> import org.xdi.oxauth.service.uma.RPTManager;
<ide> import javax.ws.rs.WebApplicationException;
<ide> import javax.ws.rs.core.Context;
<ide> import javax.ws.rs.core.Response;
<add>import java.util.Date;
<ide> import java.util.List;
<del>import java.util.UUID;
<ide>
<ide> /**
<ide> * The endpoint at which the requester asks for authorization to have a new permission.
<ide>
<ide> private void invalidateTicket(ResourceSetPermission resourceSetPermission) {
<ide> try {
<del> resourceSetPermission.setTicket(UUID.randomUUID().toString()); // invalidate ticket and persist
<add> resourceSetPermission.setExpirationDate(new Date(0)); // invalidate ticket and persist
<ide> ldapEntryManager.merge(resourceSetPermission);
<ide> } catch (Exception e) {
<ide> log.error("Failed to invalidate ticket: " + resourceSetPermission.getTicket() + ". " + e.getMessage(), e); |
|
Java | apache-2.0 | 97f6f9f17869541f74574a4e4f9b663491c80a78 | 0 | AlesyaKorol/javatest,AlesyaKorol/javatest | package ru.stqa.pft.sandbox;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Created by Alesia on 13.04.17.
*/
public class SquareTests {
@Test
public void testArea () {
Square s = new Square(5);
Assert.assertEquals(s.area(), 25.0);
}
@Test
public void testArea1 () {
Square s = new Square(10);
Assert.assertEquals(s.area(), 100.0);
}
}
| sandbox/src/test/java/ru/stqa/pft/sandbox/SquareTests.java | package ru.stqa.pft.sandbox;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Created by Alesia on 13.04.17.
*/
public class SquareTests {
@Test
public void testArea () {
Square s = new Square(5);
Assert.assertEquals(s.area(), 20.0);
}
@Test
public void testArea1 () {
Square s = new Square(10);
Assert.assertEquals(s.area(), 100.0);
}
}
| Fix for testArea.
| sandbox/src/test/java/ru/stqa/pft/sandbox/SquareTests.java | Fix for testArea. | <ide><path>andbox/src/test/java/ru/stqa/pft/sandbox/SquareTests.java
<ide> @Test
<ide> public void testArea () {
<ide> Square s = new Square(5);
<del> Assert.assertEquals(s.area(), 20.0);
<add> Assert.assertEquals(s.area(), 25.0);
<ide>
<ide> }
<ide> |
|
Java | mit | 5d1cccc3f3ed5a24c64567772925b36e0d728555 | 0 | nking/curvature-scale-space-corners-and-transformations,nking/curvature-scale-space-corners-and-transformations | package thirdparty.libquantum;
import algorithms.misc.ComplexModifiable;
import algorithms.misc.Misc;
import algorithms.misc.MiscMath;
import java.util.Random;
/*
An implementation of the grover search algorithm,
ported here to java from the libquantum file grover.c.
The method calls have been adapted for re-use by
other algorithms and methods to accept a list of
numbers have been created.
The file grover.c has copyright:
Implementation of Grover's search algorithm
Copyright 2003 Bjoern Butscher, Hendrik Weimer
This file is a port to java from a c file in libquantum.
libquantum is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
libquantum is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with libquantum; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA
*/
public class Grover {
private boolean debug = true;
private int width0 = 0;
/*
from wikipedia:
------------------------
/ diffuser \
_____ _____ ____________ _____
|0> -[H⊗n]---| |--|H⊗n|--|2|0^n> -I_n|--|H⊗n|---- ...measure
| U_w | ----- ------------ ----
|1> -[H]-----| |---------------------------------
------|
Repeat U_w + diffuser O(sqrt(N)) times
*/
/**
*
* runtime complexity is O(reg.size * reg.width),
(because decoherence lambda is 0.0).
*
     * @param query (f(x) == 1 when x == query, else f(x) == 0)
* @param reg
*/
private void oracle(int query, QuantumReg reg, Gates gates) {
int i;
//TODO: need to either double the number of bits used in
// initialization of the register to have those
// available here or need to adjust the algorithm
// to have same results with closer to current number of bits.
/*
function f(x)
== 1 when x satisifies search criteria,
that is, x == w
|U_w|x> = -|x>
== 0 else is 0, that is, x != w
|U_w|x> = |x>
// |x>|q> ----> (-1)^(f(x)) * |x>
*/
/*
-- for each query bit:
if query bit i is 0, flips that bit in all states
-- for each node state,
if bits 0 and 1 are set,
it flips the bit reg->width + 1
-- for each node state,
if bit reg->width + i is set,
it flips the bit reg->width
-- for each node.state (in reversed order):
if bits i and reg->width + i are set,
it flips the bit reg.width + 1 + i
-- for each node state,
if bits 0 and 1 are set,
it flips the bit reg->width + 1
-- for each query bit:
if query bit i is 0,
flip bit i in all node states
*/
//DEBUG
StringBuilder[] sbs = new StringBuilder[reg.size];
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = new StringBuilder();
sbs[ii] = sb;
}
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//runtime complexity is O(reg.size * reg.width),
// (because decoherence lambda is 0.0).
for (i = 0; i < width0; i++) {
//if query bit i is 0, flip bit i in all node states
if ((query & (1 << i)) == 0) {
gates.quantum_sigma_x(i, reg);
}
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//for each node.state:
// if bits 0 and 1 are set, it flips the bit reg->width + 1
gates.quantum_toffoli(0, 1, width0 + 1, reg);
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
for (i = 1; i < width0; i++) {
//for each node.state:
// if bits i and reg->width + i are set,
// it flips the bit reg->width + 1 + i
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//for each node.state:
// if bit reg->width + i is set,
// it flips the bit reg->width
gates.quantum_cnot(width0 + i, width0, reg);
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
for (i = width0 - 1; i > 0; i--) {
//for each node.state:
// if bits i and reg->width + i are set,
// it flips the bit reg.width + 1 + i
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//for each node.state:
// if bits 0 and 1 are set,
// it flips the bit reg->width + 1
gates.quantum_toffoli(0, 1, width0 + 1, reg);
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
for (i = 0; i < width0; i++) {
//if query bit i is 0, flip bit i in all node states
if ((query & (1 << i)) == 0) {
gates.quantum_sigma_x(i, reg);
}
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
System.out.println("END STATES");
for (int ii = 0; ii < reg.size; ii++) {
System.out.println(sbs[ii]);
}
}
}
/**
*
* runtime complexity is O(reg.size * reg.width),
(because decoherence lambda is 0.0).
*
     * @param query (f(x) == 1 when x == query, else f(x) == 0)
* @param reg
*/
private void oracle2(int query, QuantumReg reg, Gates gates) {
int i;
/*
function f(x)
           == 1 when x satisfies search criteria,
that is, x == w
|U_w|x> = -|x>
== 0 else is 0, that is, x != w
|U_w|x> = |x>
// |x>|q> ----> (-1)^(f(x)) * |x>
*/
//DEBUG
StringBuilder[] sbs = new StringBuilder[reg.size];
if (debug) {//DEBUG
for (i = 0; i < reg.size; i++) {
StringBuilder sb = new StringBuilder();
sbs[i] = sb;
}
for (i = 0; i < reg.size; i++) {
StringBuilder sb = sbs[i];
String str = Long.toBinaryString(reg.node[i].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
System.out.println("STATES");
for (i = 0; i < reg.size; i++) {
System.out.println(sbs[i]);
}
}
/*
TODO: review this for the computation model.
might need to be changed to single bit operations.
the libquantum code uses unitary operations, but the result
is that a state including the set bits but is not an exact query
match gets high bit set too.
wanting to set the highest bit if the state bits 0 through width0
are equal to query.
*/
for (i = 0; i < reg.size; ++i) {
if ((reg.node[i].state & ~(1 << width0)) == query) {
// set the highest bit
//reg.node[i].state |= (1 << (reg.width - 1));
//toggle highest bit
//reg.node[i].state ^= (1 << width0);
reg.node[i].amplitude.times(-1);
} else {
                // wanting to unset all set bits so that the subsequent
// hadamard gate doesn't create a node with value
// query when it's absent.
// cycling from a power of 2 might be responsible.
//
//
// NOTE: adjustments to oracle1 to result in
// a high bit set as a marker would be consistent
// with the computational model.
// then will use the highbit to unset bits
// in the remaining states.
// then the grover diffuser should work without
// a period of numbers adding a state not present
// in the original number list.
//NOTE, when have it working well with just one bit extra,
// should be able to change the initialization of the
// register to only include the original numbers.
// the 2nd set shifted and with a negative value
                // should be unnecessary and may need more complex physics
// to implement. looks a little odd, but haven't
// spent time on that yet...
}
}
if (debug) {//DEBUG
for (i = 0; i < reg.size; i++) {
StringBuilder sb = sbs[i];
String str = Long.toBinaryString(reg.node[i].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
System.out.println("END STATES");
for (i = 0; i < reg.size; i++) {
System.out.println(sbs[i]);
}
}
}
/**
runtime complexity is O(reg.size * reg.width),
(because decoherence lambda is 0.0).
* @param reg
*/
private void inversion(QuantumReg reg, Gates gates) {
int i;
//|2|0^n> -I_n|
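        // (this is the conditional phase inversion about |0...0>; together with
        // the hadamard gates applied around it in grover(), it reflects each
        // amplitude about the mean amplitude of the register)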
//Flip the target bit of each basis state, i
for (i = 0; i < width0; i++) {
gates.quantum_sigma_x(i, reg);
}
gates.quantum_hadamard(width0 - 1, reg);
if (width0 == 3) {
gates.quantum_toffoli(0, 1, 2, reg);
} else {
//If bits 0 and 1 are set, it flips the target bit.
gates.quantum_toffoli(0, 1, width0 + 1, reg);
for (i = 1; i < width0 - 1; i++) {
//If bits i and reg.width+i are set, it flips the target bit.
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
//for each reg.state,
// Flip the target bit of a basis state if
// the control bit is set
gates.quantum_cnot(width0 + i, width0 - 1, reg);
for (i = width0 - 2; i > 0; i--) {
//If bits i and reg.width+i are set, it flips the target bit.
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
//If bits 0 and 1 are set, it flips the target bit.
gates.quantum_toffoli(0, 1, width0 + 1, reg);
}
gates.quantum_hadamard(width0 - 1, reg);
//Flip the target bit of each basis state, i
for (i = 0; i < width0; i++) {
gates.quantum_sigma_x(i, reg);
}
}
/**
* runtime complexity is O(reg.size * reg.width) (because decoherence lambda is 0.0).
*
* @param target
* (f(x) == 1 when x == target, else f(x) == 0)
* @param reg
*/
private void grover(int target, QuantumReg reg, Gates gates, QuReg qureg) {
int i;
//unitary operator operating on two qubits, target and each i
// |x>|q> ----> (-1)^(f(x)) * |x>
// (gives the found solutions negative signs)
oracle(target, reg, gates);
if (debug) {//DEBUG
System.out.format(
"AFTER oracle target=%d reg.size=%d hash.length=%d\n",
target, reg.size, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
// H⊗n |2|0^n> -I_n| H⊗n
for (i = 0; i < width0; i++) {
gates.quantum_hadamard(i, reg);
}
if (debug) {//DEBUG
System.out.format(
"AFTER hadamard target=%d hadamard reg.size=%d\n",
target, reg.size);
qureg.quantum_print_qureg(reg);
}
inversion(reg, gates);
if (debug) {//DEBUG
System.out.format(
"AFTER target=%d inversion reg.size=%d\n",
target, reg.size);
qureg.quantum_print_qureg(reg);
}
for (i = 0; i < width0; i++) {
gates.quantum_hadamard(i, reg);
}
if (debug) {//DEBUG
System.out.format("AFTER target=%d 2nd hadamard reg.size=%d\n",
target, reg.size);
qureg.quantum_print_qureg(reg);
}
/*
NOTE: the diffusion filter H⊗n |2|0^n> -I_n| H⊗n
can end up falsely creating a number which is not
present in the initial list,
but which is a cycle in the numbers,
that is an offset from a power of 2 that is == query.
for example, a list with a 2 and 7 but no 5
resulted in changing the state of the 2 to 5
        looking at modifying the oracle to use an extra high
bit to mark the matches (those which are currently
the ones with width0 bit flipped)
and then use that in a gate to set all other
bits to 0 when high bit is not set.
that should avoid the cycling.
*/
}
/** runtime complexity is O(reg.size * reg.width) * nLoop
(the runtime complexity of the preparation of the register
* is ignored. it is O(2^width)).
* Note that nLoop is (Math.PI / 4) * Math.sqrt(2^width)
* where width is (the bit length of number) + 1
*
* @param number a number to search for in the enumeration of numbers
* from 0 to 2^(number bit length + 1)
*/
public int run(int number) {
int width = MiscMath.numberOfBits(number + 1);
return run(number, width);
}
/**
* runtime complexity is O(reg.size * reg.width) * nLoop.
* Note that nLoop is (Math.PI / 4) * Math.sqrt(2^width).
* (the runtime complexity of the preparation of the register
* is ignored. it is O(2^width)).
*
* @param number a number to search for in the enumeration of numbers
* from 0 to 2^width.
* @param width largest bit length to use in enumeration.
* NOTE that if it is less than (the bit length of number) + 1,
* it will be increased to that.
* @return
*/
public int run(int number, int width) {
int i;
final int N = number;
Random rng = Misc.getSecureRandom();
Gates gates = new Gates(rng);
int tmp = MiscMath.numberOfBits(N + 1);
if (width < tmp) {
width = tmp;
}
if (width < 2) {
width = 2;
}
width0 = width;
QuReg qureg = new QuReg();
QuantumReg reg = qureg.quantum_new_qureg(0, width0);
if (debug) {//DEBUG
System.out.format(
"AFTER construction reg.size=%d reg.width=%d hash.length=%d\n",
reg.size, reg.width, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
//Flip the target bit of each basis state, reg.width
//runtime complexity is O(reg.size) (because decoherence lambda is 0.0).
qureg.quantum_addscratch(1, reg);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
gates.quantum_sigma_x(width0, reg);
System.out.format("N = %d, width0=%d reg.width=%d\n", N, width0,
reg.width);
if (debug) {
//DEBUG
System.out.format("AFTER sigma_x reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
//runtime complexity is O(reg.size * reg.width)
for (i = 0; i < width0 + 1; i++) {
gates.quantum_hadamard(i, reg);
}
if (debug) {//DEBUG
System.out.format(
"AFTER 1st hadamard gates reg.size=%d reg.width=%d hash.length=%d\n",
reg.size, reg.width, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
if (debug) {//DEBUG
System.out.format("AFTER 2 1st hadamard gates reg.size=%d reg.width=%d hash.length=%d\n",
reg.size, reg.width, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
// upper limit to number of iterations from:
//"Tight Bounds on Quantum Searching" by Boyer, Brassard, Hoyer, and Tapp
int end = (int) (Math.PI / 4 * Math.sqrt(1 << width0));
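        // for example, with width0 = 3 the search space holds 1 << 3 = 8 states,
        // so the line above gives end = (int)((Math.PI / 4.) * Math.sqrt(8)) = 2.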
System.out.format("Iterating %d times\n", end);
//runtime complexity is O(reg.size * reg.width) * nLoop
for (i = 1; i <= end; i++) {
System.out.format("Iteration #%d\n", i);
grover(N, reg, gates, qureg);
}
if (debug) { //DEBUG
System.out.format(
"AFTER grover reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
gates.quantum_hadamard(width0, reg);
if (debug) {//DEBUG//DEBUG
System.out.format(
"AFTER last hadamard reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
Measure measure = new Measure();
// runtime complexity is O(reg.size)
measure.quantum_bmeasure(width0, reg, rng);
//DEBUG
System.out.format(
"AFTER bmeasure reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
for (i = 0; i < reg.size; i++) {
if (reg.node[i].state == N) {
System.out.format(
"\nFound %d with a probability of %f\n\n", N,
reg.node[i].amplitude.squareSum());
return number;
}
}
return -1;
}
// ---- adding ability to find number within a list of numbers for use
// within the quantum min algorithm ----
/**
* runtime complexity for the search
* is O(reg.size * reg.width) * nLoop
* (the runtime complexity of the preparation of the register for the list,
* O(N),
* is ignored just as in the enumerated run method).
* NOTE that the width should be set to the most number of bits needed
* for any number in list.
* NOTE also that the largest number in the list must be
* .lte. integer.max_value - 2^width.
     * @param number a number to search for within the given list.
* @param width largest bit length to use in enumeration.
* NOTE that if it is less than (the bit length of number) + 1,
* it will be increased to that.
* @param list a list of unordered numbers to search for number within.
* NOTE that the list must be valid input.
* @return
*/
public int run(int number, int width, int[] list) {
int N = number;
int i;
int tmp = MiscMath.numberOfBits(N);
if (width < tmp) {
width = tmp;
}
if (width < 2) {
width = 2;
}
width0 = width;
QuReg qureg = new QuReg();
QuantumReg reg = initializeRegister(qureg, list);
System.out.format("N = %d, list.length=%d, width0=%d reg.width=%d\n", N,
list.length, width0, reg.width);
Random rng = Misc.getSecureRandom();
int ret = processInitialized(number, reg, rng);
return ret;
}
public int run(int number, int width, int setQuBits) {
width0 = width;
QuReg qureg = new QuReg();
QuantumReg reg = initializeRegister(qureg, setQuBits);
System.out.format("N = %d, width0=%d reg.width=%d\n", number,
width0, reg.width);
Random rng = Misc.getSecureRandom();
int ret = processInitialized(number, reg, rng);
return ret;
}
/**
* Initialize the register with a list of numbers.
* Note, that the register, as the possible states of superposition of
* qubits, will have all possible permutation of the qubits up to the
* power of 2 or next higher power of 2 in the list.
* A continuous sequence of numbers from 0 up to a power of 2 is valid
* input for the current logic (can be unordered).
* A continuous sequence of numbers from a power of 2 up to a power of 2.
* is valid input.
*/
/**
* Initialize the register with a list of numbers as the eigenstate,
* superposition, and their amplitudes.
*
* @param qureg
* @param stateList
* @param amplList amplitudes associated with the eigenstate at same index
* in stateList
* @return
*/
public QuantumReg initializeRegister(QuReg qureg,
ComplexModifiable[] amplList, int[] stateList) {
final int initSize = 2 * amplList.length;
QuantumReg reg = qureg.quantum_new_qureg_size(initSize, width0);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
//need to initialize a register to have the given states from list
//and a set of the same numbers but with negative amplitude and
//the next highest bit set, that is width + 1
// rest of the algorithm should proceed in same manner.
int offset = 1 << width0;
int i;
double invSqrt = 1./Math.sqrt(2.);
for (i = 0; i < amplList.length; ++i) {
reg.node[i].state = stateList[i];
reg.node[i].state |= offset;
reg.node[i].amplitude.resetTo(amplList[i]);
reg.node[i].amplitude.times(-invSqrt);
}
int idx = amplList.length;
for (i = 0; i < amplList.length; ++i) {
reg.node[idx].state = stateList[i];
reg.node[idx].amplitude.resetTo(amplList[i]);
reg.node[idx].amplitude.times(invSqrt);
idx++;
}
return reg;
}
/**
* Initialize the register with a list of numbers as the eigenstate,
* superposition, and their amplitudes.
*
* @param qureg
* @param setBits
* @return
*/
public QuantumReg initializeRegister(QuReg qureg, int setBits) {
int nBits = MiscMath.numberOfBits(setBits);
int i;
int nSetBits = 0;
for (i = 0; i < nBits; ++i) {
if ((setBits & (1 << i)) != 0) {
nSetBits++;
}
}
QuantumReg reg = qureg.quantum_new_qureg_size(
2*nSetBits, width0);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
int offset = 1 << width0;
double norm = 1./Math.sqrt(2*nSetBits);
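        // for example, setBits = 0b0101 has 2 set bits, so 2*2 = 4 nodes are
        // created and each receives |amplitude| = 1/sqrt(4) = 0.5.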
int ii = 0;
for (i = 0; i < nBits; ++i) {
if ((setBits & (1 << i)) != 0) {
//initializing with same state + highbit off of register
reg.node[ii].state = (1 << i);
reg.node[i].state |= offset;
//use negative amplitude
reg.node[ii].amplitude.setReal(-norm);
++ii;
}
}
for (i = 0; i < nBits; ++i) {
if ((setBits & (1 << i)) != 0) {
reg.node[ii].state = 1 << i;
reg.node[ii].amplitude.setReal(norm);
++ii;
}
}
if (debug) {//DEBUG
System.out.format("initialized reg.size=%d\n", reg.size);
qureg.quantum_print_qureg(reg);
}
return reg;
}
/**
* Initialize the register with a list of numbers as the eigenstate,
* superposition, and their amplitudes.
*
* @param qureg
* @param setBits
* @param width
* @return
*/
private QuantumReg initializeRegister(QuReg qureg, int[] list) {
int listLen = list.length;
final int initSize = 2 * listLen;
QuantumReg reg = qureg.quantum_new_qureg_size(initSize, width0);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
//need to initialize a register to have the given states from list
//and a set of the same numbers but with negative amplitude and
//the next highest bit set, that is width + 1
// rest of the algorithm should proceed in same manner.
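        // for example, list = {2, 7} with width0 = 3 gives initSize = 4,
        // norm = 0.5 and offset = 8, so the register starts with states
        // {10, 15} at amplitude -0.5 and states {2, 7} at amplitude +0.5.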
int offset = 1 << width0;
int i;
double norm = 1./Math.sqrt(initSize);
int ii = 0;
for (i = 0; i < list.length; ++i) {
reg.node[ii].state = list[i];
reg.node[ii].state |= offset;
reg.node[ii].amplitude.setReal(-norm);
++ii;
}
for (i = 0; i < list.length; ++i) {
reg.node[ii].state = list[i];
reg.node[ii].amplitude.setReal(norm);
++ii;
}
if (debug) {//DEBUG
System.out.format("AFTER init reg.size=%d "
+ "reg.width=%d reg.hash.length=%d\n", reg.size,
reg.width, (1 << reg.hashw));
qureg.quantum_print_qureg(reg);
}
return reg;
}
/**
* runtime complexity for the processing
* is O(reg.size * reg.width) * nLoop
* (the runtime complexity of the preparation of the register for the list,
* O(N),
* is ignored just as in the enumerated run method).
* NOTE that the width should be set to the most number of bits needed
* for any number in list.
* NOTE also that the largest number in the list must be
* .lte. integer.max_value - 2^width.
* NOTE that measurements of register reg are not taken.
* @param number a number to search for within the initialized register reg
* @param reg initialized register which holds nodes of state which are
* searched and have amplitudes which when squared and summed over register
* are equal to 1.
* @param rng
* @return
*/
public int processInitialized(int number, QuantumReg reg, Random rng) {
int i;
final int N = number;
QuReg qureg = new QuReg();
//DEBUG
//System.out.format("AFTER construction reg.size=%d\n", reg.size);
//qureg.quantum_print_qureg(reg);
// upper limit to number of iterations from:
//"Tight Bounds on Quantum Searching" by Boyer, Brassard, Hoyer, and Tapp
// NOTE that if the number of times number will appear in list
// is known ahead of time,
// the term in the sqrt can be divided by that multiplicity.
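        // for example, with width0 = 3 and the sought number known to occur
        // twice in the list, the bound below could be reduced to
        // (int)((Math.PI / 4.) * Math.sqrt(8 / 2)) = 1 iteration.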
int end = (int) (Math.PI / 4 * Math.sqrt(1 << width0));
System.out.format("Iterating %d times\n", end);
Gates gates = new Gates(rng);
//runtime complexity is O(reg.size * reg.width) * nLoop
for (i = 1; i <= end; i++) {
System.out.format("Iteration #%d\n", i);
grover(N, reg, gates, qureg);
}
if (debug) { //DEBUG
System.out.format(
"AFTER grover reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
gates.quantum_hadamard(width0, reg);
if (debug) {//DEBUG//DEBUG
System.out.format(
"AFTER last hadamard reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
Measure measure = new Measure();
// runtime complexity is O(reg.size)
measure.quantum_bmeasure(width0, reg, rng);
//DEBUG
System.out.format(
"AFTER bmeasure reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
for (i = 0; i < reg.size; i++) {
if (reg.node[i].state == N) {
System.out.format(
"\nFound %d with a probability of %f\n\n", N,
reg.node[i].amplitude.squareSum());
return number;
}
}
return -1;
}
}
| src/thirdparty/libquantum/Grover.java | package thirdparty.libquantum;
import algorithms.misc.ComplexModifiable;
import algorithms.misc.Misc;
import algorithms.misc.MiscMath;
import java.util.Random;
/*
An implementation of the grover search algorithm,
ported here to java from the libquantum file grover.c.
The method calls have been adapted for re-use by
other algorithms and methods to accept a list of
numbers have been created.
The file grover.c has copyright:
Implementation of Grover's search algorithm
Copyright 2003 Bjoern Butscher, Hendrik Weimer
This file is a port to java from a c file in libquantum.
libquantum is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
libquantum is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with libquantum; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA
*/
public class Grover {
private boolean debug = true;
private int width0 = 0;
/*
from wikipedia:
------------------------
/ diffuser \
_____ _____ ____________ _____
|0> -[H⊗n]---| |--|H⊗n|--|2|0^n> -I_n|--|H⊗n|---- ...measure
| U_w | ----- ------------ ----
|1> -[H]-----| |---------------------------------
------|
Repeat U_w + diffuser O(sqrt(N)) times
*/
/**
*
* runtime complexity is O(reg.size * reg.width),
(because decoherence lambda is 0.0).
*
     * @param query (f(x) == 1 when x == query, else f(x) == 0)
* @param reg
*/
private void oracle(int query, QuantumReg reg, Gates gates) {
int i;
/*
function f(x)
           == 1 when x satisfies search criteria,
that is, x == w
|U_w|x> = -|x>
== 0 else is 0, that is, x != w
|U_w|x> = |x>
// |x>|q> ----> (-1)^(f(x)) * |x>
*/
/*
-- for each query bit:
if query bit i is 0, flips that bit in all states
-- for each node state,
if bits 0 and 1 are set,
it flips the bit reg->width + 1
-- for each node state,
if bit reg->width + i is set,
it flips the bit reg->width
-- for each node.state (in reversed order):
if bits i and reg->width + i are set,
it flips the bit reg.width + 1 + i
-- for each node state,
if bits 0 and 1 are set,
it flips the bit reg->width + 1
-- for each query bit:
if query bit i is 0,
flip bit i in all node states
*/
//DEBUG
StringBuilder[] sbs = new StringBuilder[reg.size];
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = new StringBuilder();
sbs[ii] = sb;
}
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//runtime complexity is O(reg.size * reg.width),
// (because decoherence lambda is 0.0).
for (i = 0; i < width0; i++) {
//if query bit i is 0, flip bit i in all node states
if ((query & (1 << i)) == 0) {
gates.quantum_sigma_x(i, reg);
}
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//for each node.state:
// if bits 0 and 1 are set, it flips the bit reg->width + 1
gates.quantum_toffoli(0, 1, width0 + 1, reg);
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
for (i = 1; i < width0; i++) {
//for each node.state:
// if bits i and reg->width + i are set,
// it flips the bit reg->width + 1 + i
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//for each node.state:
// if bit reg->width + i is set,
// it flips the bit reg->width
gates.quantum_cnot(width0 + i, width0, reg);
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
for (i = width0 - 1; i > 0; i--) {
//for each node.state:
// if bits i and reg->width + i are set,
// it flips the bit reg.width + 1 + i
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
//for each node.state:
// if bits 0 and 1 are set,
// it flips the bit reg->width + 1
gates.quantum_toffoli(0, 1, width0 + 1, reg);
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
}
for (i = 0; i < width0; i++) {
//if query bit i is 0, flip bit i in all node states
if ((query & (1 << i)) == 0) {
gates.quantum_sigma_x(i, reg);
}
}
if (debug) {//DEBUG
for (int ii = 0; ii < reg.size; ii++) {
StringBuilder sb = sbs[ii];
String str = Long.toBinaryString(reg.node[ii].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
System.out.println("END STATES");
for (int ii = 0; ii < reg.size; ii++) {
System.out.println(sbs[ii]);
}
}
}
/**
*
* runtime complexity is O(reg.size * reg.width),
(because decoherence lambda is 0.0).
*
     * @param query (f(x) == 1 when x == query, else f(x) == 0)
* @param reg
*/
private void oracle2(int query, QuantumReg reg, Gates gates) {
int i;
/*
function f(x)
           == 1 when x satisfies search criteria,
that is, x == w
|U_w|x> = -|x>
== 0 else is 0, that is, x != w
|U_w|x> = |x>
// |x>|q> ----> (-1)^(f(x)) * |x>
*/
//DEBUG
StringBuilder[] sbs = new StringBuilder[reg.size];
if (debug) {//DEBUG
for (i = 0; i < reg.size; i++) {
StringBuilder sb = new StringBuilder();
sbs[i] = sb;
}
for (i = 0; i < reg.size; i++) {
StringBuilder sb = sbs[i];
String str = Long.toBinaryString(reg.node[i].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
System.out.println("STATES");
for (i = 0; i < reg.size; i++) {
System.out.println(sbs[i]);
}
}
/*
TODO: review this for the computation model.
might need to be changed to single bit operations.
the libquantum code uses unitary operations, but the result
is that a state including the set bits but is not an exact query
match gets high bit set too.
wanting to set the highest bit if the state bits 0 through width0
are equal to query.
*/
for (i = 0; i < reg.size; ++i) {
if ((reg.node[i].state & ~(1 << width0)) == query) {
// set the highest bit
//reg.node[i].state |= (1 << (reg.width - 1));
//toggle highest bit
//reg.node[i].state ^= (1 << width0);
reg.node[i].amplitude.times(-1);
}
}
if (debug) {//DEBUG
for (i = 0; i < reg.size; i++) {
StringBuilder sb = sbs[i];
String str = Long.toBinaryString(reg.node[i].state);
while (str.length() < reg.width) {
str = "0" + str;
}
sb.append(str).append(" ");
}
System.out.println("END STATES");
for (i = 0; i < reg.size; i++) {
System.out.println(sbs[i]);
}
}
}
/**
runtime complexity is O(reg.size * reg.width),
(because decoherence lambda is 0.0).
* @param reg
*/
private void inversion(QuantumReg reg, Gates gates) {
int i;
//|2|0^n> -I_n|
//Flip the target bit of each basis state, i
for (i = 0; i < width0; i++) {
gates.quantum_sigma_x(i, reg);
}
gates.quantum_hadamard(width0 - 1, reg);
if (width0 == 3) {
gates.quantum_toffoli(0, 1, 2, reg);
} else {
//If bits 0 and 1 are set, it flips the target bit.
gates.quantum_toffoli(0, 1, width0 + 1, reg);
for (i = 1; i < width0 - 1; i++) {
//If bits i and reg.width+i are set, it flips the target bit.
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
//for each reg.state,
// Flip the target bit of a basis state if
// the control bit is set
gates.quantum_cnot(width0 + i, width0 - 1, reg);
for (i = width0 - 2; i > 0; i--) {
//If bits i and reg.width+i are set, it flips the target bit.
gates.quantum_toffoli(i, width0 + i, width0 + i + 1, reg);
}
//If bits 0 and 1 are set, it flips the target bit.
gates.quantum_toffoli(0, 1, width0 + 1, reg);
}
gates.quantum_hadamard(width0 - 1, reg);
//Flip the target bit of each basis state, i
for (i = 0; i < width0; i++) {
gates.quantum_sigma_x(i, reg);
}
}
/**
* runtime complexity is O(reg.size * reg.width) (because decoherence lambda is 0.0).
*
* @param target
* (f(x) == 1 when x == target, else f(x) == 0)
* @param reg
*/
private void grover(int target, QuantumReg reg, Gates gates, QuReg qureg) {
int i;
//unitary operator operating on two qubits, target and each i
// |x>|q> ----> (-1)^(f(x)) * |x>
// (gives the found solutions negative signs)
oracle(target, reg, gates);
if (debug) {//DEBUG
System.out.format(
"AFTER oracle target=%d reg.size=%d hash.length=%d\n",
target, reg.size, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
// H⊗n |2|0^n> -I_n| H⊗n
for (i = 0; i < width0; i++) {
gates.quantum_hadamard(i, reg);
}
if (debug) {//DEBUG
System.out.format(
"AFTER hadamard target=%d hadamard reg.size=%d\n",
target, reg.size);
qureg.quantum_print_qureg(reg);
}
inversion(reg, gates);
if (debug) {//DEBUG
System.out.format(
"AFTER target=%d inversion reg.size=%d\n",
target, reg.size);
qureg.quantum_print_qureg(reg);
}
for (i = 0; i < width0; i++) {
gates.quantum_hadamard(i, reg);
}
if (debug) {//DEBUG
System.out.format("AFTER target=%d 2nd hadamard reg.size=%d\n",
target, reg.size);
qureg.quantum_print_qureg(reg);
}
}
/** runtime complexity is O(reg.size * reg.width) * nLoop
(the runtime complexity of the preparation of the register
* is ignored. it is O(2^width)).
* Note that nLoop is (Math.PI / 4) * Math.sqrt(2^width)
* where width is (the bit length of number) + 1
*
* @param number a number to search for in the enumeration of numbers
* from 0 to 2^(number bit length + 1)
*/
public int run(int number) {
int width = MiscMath.numberOfBits(number + 1);
return run(number, width);
}
/**
* runtime complexity is O(reg.size * reg.width) * nLoop.
* Note that nLoop is (Math.PI / 4) * Math.sqrt(2^width).
* (the runtime complexity of the preparation of the register
* is ignored. it is O(2^width)).
*
* @param number a number to search for in the enumeration of numbers
* from 0 to 2^width.
* @param width largest bit length to use in enumeration.
* NOTE that if it is less than (the bit length of number) + 1,
* it will be increased to that.
* @return
*/
public int run(int number, int width) {
int i;
final int N = number;
Random rng = Misc.getSecureRandom();
Gates gates = new Gates(rng);
int tmp = MiscMath.numberOfBits(N + 1);
if (width < tmp) {
width = tmp;
}
if (width < 2) {
width = 2;
}
width0 = width;
QuReg qureg = new QuReg();
QuantumReg reg = qureg.quantum_new_qureg(0, width0);
if (debug) {//DEBUG
System.out.format(
"AFTER construction reg.size=%d reg.width=%d hash.length=%d\n",
reg.size, reg.width, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
//Flip the target bit of each basis state, reg.width
//runtime complexity is O(reg.size) (because decoherence lambda is 0.0).
qureg.quantum_addscratch(1, reg);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
gates.quantum_sigma_x(width0, reg);
System.out.format("N = %d, width0=%d reg.width=%d\n", N, width0,
reg.width);
if (debug) {
//DEBUG
System.out.format("AFTER sigma_x reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
//runtime complexity is O(reg.size * reg.width)
for (i = 0; i < width0 + 1; i++) {
gates.quantum_hadamard(i, reg);
}
if (debug) {//DEBUG
System.out.format(
"AFTER 1st hadamard gates reg.size=%d reg.width=%d hash.length=%d\n",
reg.size, reg.width, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
if (debug) {//DEBUG
System.out.format("AFTER 2 1st hadamard gates reg.size=%d reg.width=%d hash.length=%d\n",
reg.size, reg.width, 1 << reg.hashw);
qureg.quantum_print_qureg(reg);
}
// upper limit to number of iterations from:
//"Tight Bounds on Quantum Searching" by Boyer, Brassard, Hoyer, and Tapp
int end = (int) (Math.PI / 4 * Math.sqrt(1 << width0));
System.out.format("Iterating %d times\n", end);
//runtime complexity is O(reg.size * reg.width) * nLoop
for (i = 1; i <= end; i++) {
System.out.format("Iteration #%d\n", i);
grover(N, reg, gates, qureg);
}
if (debug) { //DEBUG
System.out.format(
"AFTER grover reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
gates.quantum_hadamard(width0, reg);
        if (debug) {//DEBUG
System.out.format(
"AFTER last hadamard reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
Measure measure = new Measure();
// runtime complexity is O(reg.size)
measure.quantum_bmeasure(width0, reg, rng);
//DEBUG
System.out.format(
"AFTER bmeasure reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
for (i = 0; i < reg.size; i++) {
if (reg.node[i].state == N) {
System.out.format(
"\nFound %d with a probability of %f\n\n", N,
reg.node[i].amplitude.squareSum());
return number;
}
}
return -1;
}
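    // Usage sketch (assuming the no-arg constructor of this class):
    // int found = new Grover().run(5, 3);
    // searches the enumeration 0..7 for 5 and returns 5 on a successful
    // measurement, or -1 otherwise.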
// ---- adding ability to find number within a list of numbers for use
// within the quantum min algorithm ----
/**
* runtime complexity for the search
* is O(reg.size * reg.width) * nLoop
* (the runtime complexity of the preparation of the register for the list,
* O(N),
* is ignored just as in the enumerated run method).
     * NOTE that the width should be set to the largest number of bits needed
     * for any number in the list.
     * NOTE also that the largest number in the list must be
     * <= Integer.MAX_VALUE - 2^width.
* @param number a number to search for in the enumeration of numbers
* from 0 to 2^width.
* @param width largest bit length to use in enumeration.
* NOTE that if it is less than (the bit length of number) + 1,
* it will be increased to that.
* @param list a list of unordered numbers to search for number within.
* NOTE that the list must be valid input.
* @return
*/
public int run(int number, int width, int[] list) {
int N = number;
int i;
int tmp = MiscMath.numberOfBits(N);
if (width < tmp) {
width = tmp;
}
if (width < 2) {
width = 2;
}
width0 = width;
QuReg qureg = new QuReg();
QuantumReg reg = initializeRegister(qureg, list);
System.out.format("N = %d, list.length=%d, width0=%d reg.width=%d\n", N,
list.length, width0, reg.width);
Random rng = Misc.getSecureRandom();
int ret = processInitialized(number, reg, rng);
return ret;
}
public int run(int number, int width, int setQuBits) {
width0 = width;
QuReg qureg = new QuReg();
QuantumReg reg = initializeRegister(qureg, setQuBits);
System.out.format("N = %d, width0=%d reg.width=%d\n", number,
width0, reg.width);
Random rng = Misc.getSecureRandom();
int ret = processInitialized(number, reg, rng);
return ret;
}
    /**
     * Initialize the register with a list of numbers as the eigenstates of
     * the superposition, together with their amplitudes.
     * Note that the register, as the possible states of the superposition of
     * qubits, will have all possible permutations of the qubits up to the
     * power of 2 or next higher power of 2 in the list.
     * A continuous sequence of numbers from 0 up to a power of 2 is valid
     * input for the current logic (it can be unordered), as is a continuous
     * sequence of numbers from one power of 2 up to another power of 2.
     *
     * @param qureg
     * @param amplList amplitudes associated with the eigenstate at the same
     * index in stateList
     * @param stateList
     * @return
     */
public QuantumReg initializeRegister(QuReg qureg,
ComplexModifiable[] amplList, int[] stateList) {
final int initSize = 2 * amplList.length;
QuantumReg reg = qureg.quantum_new_qureg_size(initSize, width0);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
//need to initialize a register to have the given states from list
//and a set of the same numbers but with negative amplitude and
//the next highest bit set, that is width + 1
// rest of the algorithm should proceed in same manner.
int offset = 1 << width0;
int i;
double invSqrt = 1./Math.sqrt(2.);
for (i = 0; i < amplList.length; ++i) {
reg.node[i].state = stateList[i];
reg.node[i].state |= offset;
reg.node[i].amplitude.resetTo(amplList[i]);
reg.node[i].amplitude.times(-invSqrt);
}
int idx = amplList.length;
for (i = 0; i < amplList.length; ++i) {
reg.node[idx].state = stateList[i];
reg.node[idx].amplitude.resetTo(amplList[i]);
reg.node[idx].amplitude.times(invSqrt);
idx++;
}
return reg;
}
/**
     * Initialize the register using the set bits of the given mask as the
     * eigenstates of the superposition.
*
* @param qureg
* @param setBits
* @return
*/
public QuantumReg initializeRegister(QuReg qureg, int setBits) {
int nBits = MiscMath.numberOfBits(setBits);
int i;
int nSetBits = 0;
for (i = 0; i < nBits; ++i) {
if ((setBits & (1 << i)) != 0) {
nSetBits++;
}
}
QuantumReg reg = qureg.quantum_new_qureg_size(
2*nSetBits, width0);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
int offset = 1 << width0;
double norm = 1./Math.sqrt(2*nSetBits);
int ii = 0;
for (i = 0; i < nBits; ++i) {
if ((setBits & (1 << i)) != 0) {
//initializing with same state + highbit off of register
reg.node[ii].state = (1 << i);
                reg.node[ii].state |= offset;
//use negative amplitude
reg.node[ii].amplitude.setReal(-norm);
++ii;
}
}
for (i = 0; i < nBits; ++i) {
if ((setBits & (1 << i)) != 0) {
reg.node[ii].state = 1 << i;
reg.node[ii].amplitude.setReal(norm);
++ii;
}
}
if (debug) {//DEBUG
System.out.format("initialized reg.size=%d\n", reg.size);
qureg.quantum_print_qureg(reg);
}
return reg;
}
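    // Worked example for the bit-mask initialization above: setBits = 0b1101
    // has bits 0, 2 and 3 set, so nSetBits = 3, the register gets 6 nodes and
    // norm = 1/sqrt(6) ~= 0.408. Nodes 0..2 hold 1|offset, 4|offset, 8|offset
    // with amplitude -0.408 and nodes 3..5 hold 1, 4, 8 with amplitude +0.408.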
/**
     * Initialize the register with a list of numbers as the eigenstates of
     * the superposition.
     *
     * @param qureg
     * @param list the numbers to load into the register
* @return
*/
private QuantumReg initializeRegister(QuReg qureg, int[] list) {
int listLen = list.length;
final int initSize = 2 * listLen;
QuantumReg reg = qureg.quantum_new_qureg_size(initSize, width0);
reg.width += 1;
qureg.quantum_expand_and_reconstruct_hash(reg);
//need to initialize a register to have the given states from list
//and a set of the same numbers but with negative amplitude and
//the next highest bit set, that is width + 1
// rest of the algorithm should proceed in same manner.
int offset = 1 << width0;
int i;
double norm = 1./Math.sqrt(initSize);
int ii = 0;
for (i = 0; i < list.length; ++i) {
reg.node[ii].state = list[i];
reg.node[ii].state |= offset;
reg.node[ii].amplitude.setReal(-norm);
++ii;
}
for (i = 0; i < list.length; ++i) {
reg.node[ii].state = list[i];
reg.node[ii].amplitude.setReal(norm);
++ii;
}
if (debug) {//DEBUG
System.out.format("AFTER init reg.size=%d "
+ "reg.width=%d reg.hash.length=%d\n", reg.size,
reg.width, (1 << reg.hashw));
qureg.quantum_print_qureg(reg);
}
return reg;
}
/**
* runtime complexity for the processing
* is O(reg.size * reg.width) * nLoop
* (the runtime complexity of the preparation of the register for the list,
* O(N),
* is ignored just as in the enumerated run method).
     * NOTE that the width should be set to the largest number of bits needed
     * for any number in the list.
     * NOTE also that the largest number in the list must be
     * <= Integer.MAX_VALUE - 2^width.
* NOTE that measurements of register reg are not taken.
* @param number a number to search for within the initialized register reg
* @param reg initialized register which holds nodes of state which are
* searched and have amplitudes which when squared and summed over register
* are equal to 1.
* @param rng
* @return
*/
public int processInitialized(int number, QuantumReg reg, Random rng) {
int i;
final int N = number;
QuReg qureg = new QuReg();
//DEBUG
//System.out.format("AFTER construction reg.size=%d\n", reg.size);
//qureg.quantum_print_qureg(reg);
// upper limit to number of iterations from:
//"Tight Bounds on Quantum Searching" by Boyer, Brassard, Hoyer, and Tapp
// NOTE that if the number of times number will appear in list
// is known ahead of time,
// the term in the sqrt can be divided by that multiplicity.
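        // e.g. with width0 = 4 and a target known to occur 4 times, the
        // sqrt term drops from sqrt(16) = 4 to sqrt(16 / 4) = 2, so the
        // 3 iterations below would become 1.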
int end = (int) (Math.PI / 4 * Math.sqrt(1 << width0));
System.out.format("Iterating %d times\n", end);
Gates gates = new Gates(rng);
//runtime complexity is O(reg.size * reg.width) * nLoop
for (i = 1; i <= end; i++) {
System.out.format("Iteration #%d\n", i);
grover(N, reg, gates, qureg);
}
if (debug) { //DEBUG
System.out.format(
"AFTER grover reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
gates.quantum_hadamard(width0, reg);
        if (debug) {//DEBUG
System.out.format(
"AFTER last hadamard reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
}
Measure measure = new Measure();
// runtime complexity is O(reg.size)
measure.quantum_bmeasure(width0, reg, rng);
//DEBUG
System.out.format(
"AFTER bmeasure reg.size=%d reg.width=%d\n",
reg.size, reg.width);
qureg.quantum_print_qureg(reg);
for (i = 0; i < reg.size; i++) {
if (reg.node[i].state == N) {
System.out.format(
"\nFound %d with a probability of %f\n\n", N,
reg.node[i].amplitude.squareSum());
return number;
}
}
return -1;
}
}
| added more notes to Grover.java on changes to make
| src/thirdparty/libquantum/Grover.java | added more notes to Grover.java on changes to make | <ide><path>rc/thirdparty/libquantum/Grover.java
<ide> */
<ide> private void oracle(int query, QuantumReg reg, Gates gates) {
<ide> int i;
<add>
<add> //TODO: need to either double the number of bits used in
<add> // initialization of the register to have those
<add> // available here or need to adjust the algorithm
<add> // to have same results with closer to current number of bits.
<ide>
<ide> /*
<ide> function f(x)
<ide> //reg.node[i].state ^= (1 << width0);
<ide>
<ide> reg.node[i].amplitude.times(-1);
<add> } else {
<add>                // wanting to unset all set bits so that the subsequent
<add> // hadamard gate doesn't create a node with value
<add> // query when it's absent.
<add> // cycling from a power of 2 might be responsible.
<add> //
<add> //
<add> // NOTE: adjustments to oracle1 to result in
<add> // a high bit set as a marker would be consistent
<add> // with the computational model.
<add> // then will use the highbit to unset bits
<add> // in the remaining states.
<add> // then the grover diffuser should work without
<add> // a period of numbers adding a state not present
<add> // in the original number list.
<add>
<add> //NOTE, when have it working well with just one bit extra,
<add> // should be able to change the initialization of the
<add> // register to only include the original numbers.
<add> // the 2nd set shifted and with a negative value
<add>                // should be unnecessary and may need more complex physics
<add> // to implement. looks a little odd, but haven't
<add> // spent time on that yet...
<add>
<ide> }
<ide> }
<ide>
<ide> target, reg.size);
<ide> qureg.quantum_print_qureg(reg);
<ide> }
<add>
<add> /*
<add> NOTE: the diffusion filter H⊗n |2|0^n> -I_n| H⊗n
<add> can end up falsely creating a number which is not
<add> present in the initial list,
<add> but which is a cycle in the numbers,
<add> that is an offset from a power of 2 that is == query.
<add>
<add> for example, a list with a 2 and 7 but no 5
<add> resulted in changing the state of the 2 to 5
<add>
<add>         looking at modifying the oracle to use an extra high
<add> bit to mark the matches (those which are currently
<add> the ones with width0 bit flipped)
<add> and then use that in a gate to set all other
<add> bits to 0 when high bit is not set.
<add> that should avoid the cycling.
<add>
<add> */
<ide>
<ide> }
<ide> |
|
Java | epl-1.0 | 3032c2444c3ef958d907b80000c22d4290c1c75b | 0 | Snickermicker/smarthome,Snickermicker/smarthome,Snickermicker/smarthome,Snickermicker/smarthome | /**
* Copyright (c) 2014,2018 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.smarthome.core.library.types;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Formatter;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.smarthome.core.types.Command;
import org.eclipse.smarthome.core.types.ComplexType;
import org.eclipse.smarthome.core.types.PrimitiveType;
import org.eclipse.smarthome.core.types.State;
/**
* This type can be used for items that are dealing with GPS or
* location awareness functionality.
*
* @author Gaël L'hopital
* @author John Cocula
*
*/
@NonNullByDefault
public class PointType implements ComplexType, Command, State {
// external format patterns for output
public static final String LOCATION_PATTERN = "%2$s°N %3$s°E %1$sm";
public static final double EARTH_GRAVITATIONAL_CONSTANT = 3.986004418e14;
public static final double WGS84_A = 6378137; // The equatorial radius of WGS84 ellipsoid (6378137 m).
// constants for the constituents
public static final String KEY_LATITUDE = "lat";
public static final String KEY_LONGITUDE = "long";
public static final String KEY_ALTITUDE = "alt";
private static final BigDecimal CIRCLE = new BigDecimal(360);
private static final BigDecimal FLAT = new BigDecimal(180);
private static final BigDecimal RIGHT = new BigDecimal(90);
private BigDecimal latitude = BigDecimal.ZERO; // in decimal degrees
private BigDecimal longitude = BigDecimal.ZERO; // in decimal degrees
private BigDecimal altitude = BigDecimal.ZERO; // in decimal meters
/**
* Default constructor creates a point at sea level where the equator
* (0° latitude) and the prime meridian (0° longitude) intersect.
* A nullary constructor is needed by
     * {@link org.eclipse.smarthome.core.internal.items.ItemUpdater#receiveUpdate}
*/
public PointType() {
}
public PointType(DecimalType latitude, DecimalType longitude) {
canonicalize(latitude, longitude);
}
public PointType(DecimalType latitude, DecimalType longitude, DecimalType altitude) {
this(latitude, longitude);
setAltitude(altitude);
}
public PointType(StringType latitude, StringType longitude) {
this(new DecimalType(latitude.toString()), new DecimalType(longitude.toString()));
}
public PointType(StringType latitude, StringType longitude, StringType altitude) {
this(new DecimalType(latitude.toString()), new DecimalType(longitude.toString()),
new DecimalType(altitude.toString()));
}
public PointType(String value) {
if (!value.isEmpty()) {
List<String> elements = Arrays.stream(value.split(",")).map(in -> in.trim()).collect(Collectors.toList());
if (elements.size() >= 2) {
canonicalize(new DecimalType(elements.get(0)), new DecimalType(elements.get(1)));
if (elements.size() == 3) {
setAltitude(new DecimalType(elements.get(2)));
} else if (elements.size() > 3) {
throw new IllegalArgumentException(value
+ " is not a valid PointType syntax. The syntax must not consist of more than 3 elements.");
}
} else {
throw new IllegalArgumentException(value + " is not a valid PointType syntax");
}
} else {
throw new IllegalArgumentException("Constructor argument must not be blank");
}
}
public DecimalType getLatitude() {
return new DecimalType(latitude);
}
public DecimalType getLongitude() {
return new DecimalType(longitude);
}
public DecimalType getAltitude() {
return new DecimalType(altitude);
}
public void setAltitude(DecimalType altitude) {
this.altitude = altitude.toBigDecimal();
}
public DecimalType getGravity() {
double latRad = Math.toRadians(latitude.doubleValue());
double deltaG = -2000.0 * (altitude.doubleValue() / 1000) * EARTH_GRAVITATIONAL_CONSTANT
/ (Math.pow(WGS84_A, 3.0));
double sin2lat = Math.sin(latRad) * Math.sin(latRad);
double sin22lat = Math.sin(2.0 * latRad) * Math.sin(2.0 * latRad);
double result = (9.780327 * (1.0 + 5.3024e-3 * sin2lat - 5.8e-6 * sin22lat) + deltaG);
return new DecimalType(result);
}
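    // Worked example for the formula above: at the equator at sea level
    // (latitude 0, altitude 0) the sine terms and deltaG vanish, giving
    // 9.780327 m/s^2; at latitude 45 and altitude 0 the result is
    // 9.780327 * (1 + 5.3024e-3 * 0.5 - 5.8e-6 * 1.0) ~= 9.8062 m/s^2.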
/**
* Return the distance in meters from otherPoint, ignoring altitude. This algorithm also
* ignores the oblate spheroid shape of Earth and assumes a perfect sphere, so results
* are inexact.
*
* @param otherPoint
* @return distance in meters
* @see <a href="https://en.wikipedia.org/wiki/Haversine_formula">Haversine formula</a>
*/
public DecimalType distanceFrom(PointType otherPoint) {
double dLat = Math.toRadians(otherPoint.latitude.doubleValue() - this.latitude.doubleValue());
double dLong = Math.toRadians(otherPoint.longitude.doubleValue() - this.longitude.doubleValue());
double a = Math.pow(Math.sin(dLat / 2D), 2D) + Math.cos(Math.toRadians(this.latitude.doubleValue()))
* Math.cos(Math.toRadians(otherPoint.latitude.doubleValue())) * Math.pow(Math.sin(dLong / 2D), 2D);
double c = 2D * Math.atan2(Math.sqrt(a), Math.sqrt(1D - a));
return new DecimalType(WGS84_A * c);
}
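    // Worked example of the haversine computation above: from (0, 0) to (0, 1),
    // one degree of longitude on the equator, a = sin^2(0.5 deg),
    // c = 2 * atan2(sqrt(a), sqrt(1 - a)) ~= 0.0174533 rad, so the result is
    // WGS84_A * c ~= 111319 m, roughly 111.3 km.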
/**
* Formats the value of this type according to a pattern (see {@link Formatter}). One single value of this type can
* be referenced by the pattern using an index. The item order is defined by the natural (alphabetical) order of
* their keys.
*
* @param pattern the pattern to use containing indexes to reference the single elements of this type
* @return the formatted string
*/
@Override
public String format(@Nullable String pattern) {
String formatPattern = pattern;
if (formatPattern == null || "%s".equals(formatPattern)) {
formatPattern = LOCATION_PATTERN;
}
return String.format(formatPattern, getConstituents().values().toArray());
}
public static PointType valueOf(String value) {
return new PointType(value);
}
@Override
public String toString() {
return toFullString();
}
@Override
public String toFullString() {
StringBuilder sb = new StringBuilder(latitude.toPlainString());
sb.append(',');
sb.append(longitude.toPlainString());
if (!altitude.equals(BigDecimal.ZERO)) {
sb.append(',');
sb.append(altitude.toPlainString());
}
return sb.toString();
}
@Override
public SortedMap<String, PrimitiveType> getConstituents() {
SortedMap<String, PrimitiveType> result = new TreeMap<>();
result.put(KEY_LATITUDE, getLatitude());
result.put(KEY_LONGITUDE, getLongitude());
result.put(KEY_ALTITUDE, getAltitude());
return result;
}
/**
* Canonicalize the current latitude and longitude values such that:
*
* <pre>
     * -90 <= latitude <= +90
     * -180 < longitude <= +180
* </pre>
*/
private void canonicalize(DecimalType aLat, DecimalType aLon) {
latitude = FLAT.add(aLat.toBigDecimal()).remainder(CIRCLE);
longitude = aLon.toBigDecimal();
if (latitude.compareTo(BigDecimal.ZERO) == -1) {
latitude = latitude.add(CIRCLE);
}
latitude = latitude.subtract(FLAT);
if (latitude.compareTo(RIGHT) == 1) {
latitude = FLAT.subtract(latitude);
longitude = longitude.add(FLAT);
} else if (latitude.compareTo(RIGHT.negate()) == -1) {
latitude = FLAT.negate().subtract(latitude);
longitude = longitude.add(FLAT);
}
longitude = FLAT.add(longitude).remainder(CIRCLE);
if (longitude.compareTo(BigDecimal.ZERO) <= 0) {
longitude = longitude.add(CIRCLE);
}
longitude = longitude.subtract(FLAT);
}
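    // Worked examples of the canonicalization above: (latitude 100, longitude 10)
    // crosses the pole and becomes (80, -170); (latitude 0, longitude 190) wraps
    // to (0, -170); values already inside the documented ranges are unchanged.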
@Override
public int hashCode() {
int tmp = 10000 * getLatitude().hashCode();
tmp += 100 * getLongitude().hashCode();
tmp += getAltitude().hashCode();
return tmp;
}
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof PointType)) {
return false;
}
PointType other = (PointType) obj;
if (!getLatitude().equals(other.getLatitude()) || !getLongitude().equals(other.getLongitude())
|| !getAltitude().equals(other.getAltitude())) {
return false;
}
return true;
}
}
| bundles/core/org.eclipse.smarthome.core/src/main/java/org/eclipse/smarthome/core/library/types/PointType.java | /**
* Copyright (c) 2014,2018 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.smarthome.core.library.types;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.smarthome.core.types.Command;
import org.eclipse.smarthome.core.types.ComplexType;
import org.eclipse.smarthome.core.types.PrimitiveType;
import org.eclipse.smarthome.core.types.State;
/**
* This type can be used for items that are dealing with GPS or
* location awareness functionality.
*
* @author Gaël L'hopital
* @author John Cocula
*
*/
@NonNullByDefault
public class PointType implements ComplexType, Command, State {
public static final double EARTH_GRAVITATIONAL_CONSTANT = 3.986004418e14;
public static final double WGS84_A = 6378137; // The equatorial radius of WGS84 ellipsoid (6378137 m).
// constants for the constituents
public static final String KEY_LATITUDE = "lat";
public static final String KEY_LONGITUDE = "long";
public static final String KEY_ALTITUDE = "alt";
private static final BigDecimal CIRCLE = new BigDecimal(360);
private static final BigDecimal FLAT = new BigDecimal(180);
private static final BigDecimal RIGHT = new BigDecimal(90);
private BigDecimal latitude = BigDecimal.ZERO; // in decimal degrees
private BigDecimal longitude = BigDecimal.ZERO; // in decimal degrees
private BigDecimal altitude = BigDecimal.ZERO; // in decimal meters
/**
* Default constructor creates a point at sea level where the equator
* (0° latitude) and the prime meridian (0° longitude) intersect.
* A nullary constructor is needed by
     * {@link org.eclipse.smarthome.core.internal.items.ItemUpdater#receiveUpdate}
*/
public PointType() {
}
public PointType(DecimalType latitude, DecimalType longitude) {
canonicalize(latitude, longitude);
}
public PointType(DecimalType latitude, DecimalType longitude, DecimalType altitude) {
this(latitude, longitude);
setAltitude(altitude);
}
public PointType(StringType latitude, StringType longitude) {
this(new DecimalType(latitude.toString()), new DecimalType(longitude.toString()));
}
public PointType(StringType latitude, StringType longitude, StringType altitude) {
this(new DecimalType(latitude.toString()), new DecimalType(longitude.toString()),
new DecimalType(altitude.toString()));
}
public PointType(String value) {
if (!value.isEmpty()) {
List<String> elements = Arrays.stream(value.split(",")).map(in -> in.trim()).collect(Collectors.toList());
if (elements.size() >= 2) {
canonicalize(new DecimalType(elements.get(0)), new DecimalType(elements.get(1)));
if (elements.size() == 3) {
setAltitude(new DecimalType(elements.get(2)));
} else if (elements.size() > 3) {
throw new IllegalArgumentException(value
+ " is not a valid PointType syntax. The syntax must not consist of more than 3 elements.");
}
} else {
throw new IllegalArgumentException(value + " is not a valid PointType syntax");
}
} else {
throw new IllegalArgumentException("Constructor argument must not be blank");
}
}
public DecimalType getLatitude() {
return new DecimalType(latitude);
}
public DecimalType getLongitude() {
return new DecimalType(longitude);
}
public DecimalType getAltitude() {
return new DecimalType(altitude);
}
public void setAltitude(DecimalType altitude) {
this.altitude = altitude.toBigDecimal();
}
public DecimalType getGravity() {
double latRad = Math.toRadians(latitude.doubleValue());
double deltaG = -2000.0 * (altitude.doubleValue() / 1000) * EARTH_GRAVITATIONAL_CONSTANT
/ (Math.pow(WGS84_A, 3.0));
double sin2lat = Math.sin(latRad) * Math.sin(latRad);
double sin22lat = Math.sin(2.0 * latRad) * Math.sin(2.0 * latRad);
double result = (9.780327 * (1.0 + 5.3024e-3 * sin2lat - 5.8e-6 * sin22lat) + deltaG);
return new DecimalType(result);
}
/**
* Return the distance in meters from otherPoint, ignoring altitude. This algorithm also
* ignores the oblate spheroid shape of Earth and assumes a perfect sphere, so results
* are inexact.
*
* @param otherPoint
* @return distance in meters
* @see <a href="https://en.wikipedia.org/wiki/Haversine_formula">Haversine formula</a>
*/
public DecimalType distanceFrom(PointType otherPoint) {
double dLat = Math.toRadians(otherPoint.latitude.doubleValue() - this.latitude.doubleValue());
double dLong = Math.toRadians(otherPoint.longitude.doubleValue() - this.longitude.doubleValue());
double a = Math.pow(Math.sin(dLat / 2D), 2D) + Math.cos(Math.toRadians(this.latitude.doubleValue()))
* Math.cos(Math.toRadians(otherPoint.latitude.doubleValue())) * Math.pow(Math.sin(dLong / 2D), 2D);
double c = 2D * Math.atan2(Math.sqrt(a), Math.sqrt(1D - a));
return new DecimalType(WGS84_A * c);
}
/**
* <p>
* Formats the value of this type according to a pattern (@see {@link Formatter}). One single value of this type can
* be referenced by the pattern using an index. The item order is defined by the natural (alphabetical) order of
* their keys.
*
* @param pattern the pattern to use containing indexes to reference the
* single elements of this type.
*/
@Override
public String format(String pattern) {
return String.format(pattern, getConstituents().values().toArray());
}
public static PointType valueOf(String value) {
return new PointType(value);
}
@Override
public String toString() {
return toFullString();
}
@Override
public String toFullString() {
StringBuilder sb = new StringBuilder(latitude.toPlainString());
sb.append(',');
sb.append(longitude.toPlainString());
if (!altitude.equals(BigDecimal.ZERO)) {
sb.append(',');
sb.append(altitude.toPlainString());
}
return sb.toString();
}
@Override
public SortedMap<String, PrimitiveType> getConstituents() {
SortedMap<String, PrimitiveType> result = new TreeMap<>();
result.put(KEY_LATITUDE, getLatitude());
result.put(KEY_LONGITUDE, getLongitude());
result.put(KEY_ALTITUDE, getAltitude());
return result;
}
/**
* Canonicalize the current latitude and longitude values such that:
*
* <pre>
     * -90 <= latitude <= +90
     * -180 < longitude <= +180
* </pre>
*/
private void canonicalize(DecimalType aLat, DecimalType aLon) {
latitude = FLAT.add(aLat.toBigDecimal()).remainder(CIRCLE);
longitude = aLon.toBigDecimal();
if (latitude.compareTo(BigDecimal.ZERO) == -1) {
latitude = latitude.add(CIRCLE);
}
latitude = latitude.subtract(FLAT);
if (latitude.compareTo(RIGHT) == 1) {
latitude = FLAT.subtract(latitude);
longitude = longitude.add(FLAT);
} else if (latitude.compareTo(RIGHT.negate()) == -1) {
latitude = FLAT.negate().subtract(latitude);
longitude = longitude.add(FLAT);
}
longitude = FLAT.add(longitude).remainder(CIRCLE);
if (longitude.compareTo(BigDecimal.ZERO) <= 0) {
longitude = longitude.add(CIRCLE);
}
longitude = longitude.subtract(FLAT);
}
@Override
public int hashCode() {
int tmp = 10000 * getLatitude().hashCode();
tmp += 100 * getLongitude().hashCode();
tmp += getAltitude().hashCode();
return tmp;
}
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof PointType)) {
return false;
}
PointType other = (PointType) obj;
if (!getLatitude().equals(other.getLatitude()) || !getLongitude().equals(other.getLongitude())
|| !getAltitude().equals(other.getAltitude())) {
return false;
}
return true;
}
}
| [core] Added default pattern for formatting 'PointType' values (#6190)
* Added default pattern for 'PointType'
Signed-off-by: Christoph Weitkamp <[email protected]> | bundles/core/org.eclipse.smarthome.core/src/main/java/org/eclipse/smarthome/core/library/types/PointType.java | [core] Added default pattern for formatting 'PointType' values (#6190) | <ide><path>undles/core/org.eclipse.smarthome.core/src/main/java/org/eclipse/smarthome/core/library/types/PointType.java
<ide>
<ide> import java.math.BigDecimal;
<ide> import java.util.Arrays;
<add>import java.util.Formatter;
<ide> import java.util.List;
<ide> import java.util.SortedMap;
<ide> import java.util.TreeMap;
<ide> */
<ide> @NonNullByDefault
<ide> public class PointType implements ComplexType, Command, State {
<add>
<add> // external format patterns for output
<add> public static final String LOCATION_PATTERN = "%2$s°N %3$s°E %1$sm";
<ide>
<ide> public static final double EARTH_GRAVITATIONAL_CONSTANT = 3.986004418e14;
<ide> public static final double WGS84_A = 6378137; // The equatorial radius of WGS84 ellipsoid (6378137 m).
<ide> }
<ide>
<ide> /**
<del> * <p>
<del> * Formats the value of this type according to a pattern (@see {@link Formatter}). One single value of this type can
<add> * Formats the value of this type according to a pattern (see {@link Formatter}). One single value of this type can
<ide> * be referenced by the pattern using an index. The item order is defined by the natural (alphabetical) order of
<ide> * their keys.
<ide> *
<del> * @param pattern the pattern to use containing indexes to reference the
<del> * single elements of this type.
<del> */
<del> @Override
<del> public String format(String pattern) {
<del> return String.format(pattern, getConstituents().values().toArray());
<add> * @param pattern the pattern to use containing indexes to reference the single elements of this type
<add> * @return the formatted string
<add> */
<add> @Override
<add> public String format(@Nullable String pattern) {
<add> String formatPattern = pattern;
<add>
<add> if (formatPattern == null || "%s".equals(formatPattern)) {
<add> formatPattern = LOCATION_PATTERN;
<add> }
<add>
<add> return String.format(formatPattern, getConstituents().values().toArray());
<ide> }
<ide>
<ide> public static PointType valueOf(String value) { |
|
Java | agpl-3.0 | e9ce03a49c9ecd06a6803f248114ccd7f8cd0f13 | 0 | picoded/JavaCommons,picoded/JavaCommons,picoded/JavaCommons,picoded/JavaCommons,picoded/JavaCommons,picoded/JavaCommons,picoded/JavaCommons | package picodedTests.RESTBuilder.templates;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.catalina.LifecycleException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import picoded.JStack.JStackException;
import picoded.JStruct.JStruct;
import picoded.JStruct.MetaObject;
import picoded.JStruct.MetaTable;
import picoded.RESTBuilder.RESTBuilder;
import picoded.RESTBuilder.templates.MetaTableApiBuilder;
import picoded.conv.ConvertJSON;
import picoded.conv.GUID;
import picoded.servlet.BasePage;
import picoded.servletUtils.EmbeddedServlet;
import picoded.webUtils.RequestHttp;
import picoded.webUtils.ResponseHttp;
public class MetaTableApiBuilderTomcat_test {
@SuppressWarnings("serial")
public static class MetaTableApiServlet extends BasePage {
public boolean isJsonRequest() {
return true;
}
// / Process the request, not the authentication layer
public boolean doJSON(Map<String, Object> outputData, Map<String, Object> templateData) throws Exception {
return rb.servletCall("", this, outputData);
}
}
protected static EmbeddedServlet tomcat = null;
private static List<String> _oids = null;
private static MetaTable mtObj = null;
private static MetaTableApiBuilder mtApi = null;
private static RESTBuilder rb = null;
protected static int port = 15000;
protected static boolean portAvailableCalled = false;
private static MetaTable implementationConstructor() {
return (new JStruct()).getMetaTable("test");
}
private static void populateMetaTableDummyData(int min, int max) {
Random rnd = new Random();
int _max = rnd.nextInt(max);
_max = _max > min ? _max : min;
_oids = new ArrayList<String>();
for (int i = 0; i < _max; ++i) {
String oid = GUID.base58();
_oids.add(oid);
Map<String, Object> innerObj = new HashMap<String, Object>();
innerObj.put("_oid", oid);
innerObj.put("_name", "name" + i);
innerObj.put("_age", "age" + i);
mtObj.append(oid, innerObj).saveAll();
}
}
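    // Each dummy record appended above has the shape
    // {"_oid": "<GUID.base58()>", "_name": "name<i>", "_age": "age<i>"},
    // so the tests below can address the objects through the collected _oids.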
@BeforeClass
public static void serverSetUp() throws LifecycleException, JStackException {
if (!portAvailableCalled) {
while (!portAvailableCalled) {
available(port);
if (!portAvailableCalled) {
port += 100;
}
}
}
mtObj = implementationConstructor();
mtObj.systemSetup();
populateMetaTableDummyData(3, 3);
mtApi = new MetaTableApiBuilder(mtObj);
rb = new RESTBuilder();
mtApi.setupRESTBuilder(rb, "/meta-test/");
if (tomcat == null) {
File webInfFile = new File("./test-files/tmp/WEB-INF");
if (webInfFile.exists()) {
for (File file : webInfFile.listFiles()) {
                    file.delete(); // to accommodate certain people who do not
// use command line
}
}
webInfFile.mkdir();
File context = new File("./test-files/tmp");
tomcat = new EmbeddedServlet("", context)
.withServlet("/api/*", "meta-table-test", new MetaTableApiServlet()).withPort(port);
tomcat.start();
// tomcat.awaitServer();
}
}
@AfterClass
public static void serverTearDown() throws LifecycleException {
if (mtObj != null) {
mtObj.systemTeardown();
}
mtObj = null;
if (tomcat != null) {
tomcat.stop();
}
tomcat = null;
}
// @Test
// public void awaitServer() {
// tomcat.awaitServer();
// }
RequestHttp requester;
ResponseHttp response;
Map<String, Object> responseMap;
@Test
public void list_POST_test() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/list";
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
// Map<String, String[]> headersMap = new HashMap<String, String[]>();
paramsMap.put("headers", new String[] { "[\"_oid\"]" });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
Map<String, Object> resMap = response.toMap();
@SuppressWarnings("unchecked")
List<List<String>> dataList = (List<List<String>>) resMap.get("data");
assertNotNull(dataList);
List<String> convList = new ArrayList<String>();
for (List<String> innerList : dataList) {
convList.addAll(innerList);
}
boolean contains = convList.containsAll(_oids);
assertTrue(contains);
}
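    // Illustrative response for the list call above (shape inferred from the
    // assertions, values are hypothetical):
    // {"data": [["<oid0>"], ["<oid1>"], ["<oid2>"]], "headers": ["_oid"], ...}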
@Test
public void meta_GET_test() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/get";
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
// Map<String, String[]> headersMap = new HashMap<String, String[]>();
response = RequestHttp.get(path, paramsMap, null, null);
if (response.statusCode() == 404) {
System.out.println("RESPONSE STATUS: " + 404);
}
assertNotNull(response);
// Map<String, String[]> resHeaders = response.headersMap();
}
@Test
public void meta_POST_test_delta() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/post";
String jsonString = "";
Map<String, Object> deltaObj = new HashMap<String, Object>();
deltaObj.put("_name", "DeltaReplacedName");
deltaObj.put("_age", "DeltaReplacedAge");
jsonString = ConvertJSON.fromMap(deltaObj);
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
paramsMap.put("updateMode", new String[] { "delta" });
paramsMap.put("meta", new String[] { jsonString });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
Map<String, Object> newMetaObj = new HashMap<String, Object>();
newMetaObj.put("_name", "NewMetaObjectName");
newMetaObj.put("_age", "NewMetaObjectAge");
}
@Test
public void meta_POST_test_full() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/post";
String jsonString = "";
Map<String, Object> fullObj = new HashMap<String, Object>();
fullObj.put("_name", "FullReplacedName");
jsonString = ConvertJSON.fromMap(fullObj);
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(1) });
paramsMap.put("updateMode", new String[] { "full" });
paramsMap.put("meta", new String[] { jsonString });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
}
@Test
public void meta_POST_test_new() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/post";
String jsonString = "";
Map<String, Object> newMetaObj = new HashMap<String, Object>();
newMetaObj.put("_name", "NewMetaObjectName");
newMetaObj.put("_age", "NewMetaObjectAge");
jsonString = ConvertJSON.fromMap(newMetaObj);
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { "new" });
paramsMap.put("meta", new String[] { jsonString });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
}
@Test
public void meta_DELETE_test() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/meta";
String getPath = "http://127.0.0.1:" + port + "/api/meta-test/meta";
Map<String, Object> respMap = null;
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
// first get user
response = RequestHttp.get(getPath, paramsMap, null, null);
assertNotNull(respMap = response.toMap());
assertNotNull(respMap.get("meta"));
// then delete
response = RequestHttp.delete(path, paramsMap, null, null);
assertNotNull(respMap = response.toMap());
// then check again
response = RequestHttp.get(getPath, paramsMap, null, null);
assertNotNull(respMap = response.toMap());
assertNull(respMap.get("meta"));
}
@SuppressWarnings("try")
private static boolean available(int port) {
if (!portAvailableCalled) {
try (Socket ignored = new Socket("localhost", port)) {
return false;
} catch (IOException ignored) {
portAvailableCalled = true;
//System.out.println(" PORT : " + port);
return true;
}
}
return true;
}
@Test
public void list_GET_and_POST_AllBlankTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
}
@Test
public void list_GET_and_POSTTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_age" });
paramsMap.put("searchValue", new String[] { "abc", "def", "xyz" });
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
}
@Test
public void list_GET_and_POSTOrderByTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_age" });
paramsMap.put("searchValue", new String[] { "xyz" });
paramsMap.put("orderBy", new String[] { "_name" });
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
assertNotNull(responseMap.get("draw"));
assertNotNull(responseMap.get("headers"));
}
@Test
public void list_GET_and_POSTQueryTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_oid" });
paramsMap.put("searchValue", new String[] { "xyz", "123456" });
paramsMap.put("queryArgs", new String[] { "25", "65" });
paramsMap.put("orderBy", new String[] { "_name" });
paramsMap.put("query", new String[] { "_age > ? AND _age < ? " });
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
assertNotNull(responseMap.get("draw"));
assertNotNull(responseMap.get("headers"));
}
@Test
public void csv_exportAllParamest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_oid" });
paramsMap.put("searchValue", new String[] { "xyz", "123456" });
paramsMap.put("queryArgs", new String[] { "25", "65" });
paramsMap.put("orderBy", new String[] { "_name" });
paramsMap.put("query", new String[] { "_age > ? AND _age < ? " });
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/csv", paramsMap);
assertNotNull(response);
}
@Test
public void csv_exportRESTTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/csv", paramsMap);
assertNotNull(response);
}
@Test
public void meta_POSTInvalidTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/meta", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertEquals("No meta object was found in the request", responseMap.get("error"));
}
@Test
public void meta_POSTTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
paramsMap.put("meta", new String[] { "{\"oop\":\"java\",\"foo\":\"class\"}" });
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/meta", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull("No meta object was found in the request", responseMap.get("error"));
LinkedHashMap updatedMObj = (LinkedHashMap)responseMap.get("updateMeta");
assertEquals("java", updatedMObj.get("oop"));
}
@Test
public void meta_POSTAllParamTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
paramsMap.put("meta", new String[] { "{\"oop\":\"java\",\"foo\":\"class\"}" });
paramsMap.put("updateMode", new String[] { "updateMode" });
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/meta", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull("No meta object was found in the request", responseMap.get("error"));
LinkedHashMap updatedMObj = (LinkedHashMap)responseMap.get("updateMeta");
assertEquals("java", updatedMObj.get("oop"));
}
}
| src/picodedTests/RESTBuilder/templates/MetaTableApiBuilderTomcat_test.java | package picodedTests.RESTBuilder.templates;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.catalina.LifecycleException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import picoded.JStack.JStackException;
import picoded.JStruct.JStruct;
import picoded.JStruct.MetaTable;
import picoded.RESTBuilder.RESTBuilder;
import picoded.RESTBuilder.templates.MetaTableApiBuilder;
import picoded.conv.ConvertJSON;
import picoded.conv.GUID;
import picoded.servlet.BasePage;
import picoded.servletUtils.EmbeddedServlet;
import picoded.webUtils.RequestHttp;
import picoded.webUtils.ResponseHttp;
public class MetaTableApiBuilderTomcat_test {
@SuppressWarnings("serial")
public static class MetaTableApiServlet extends BasePage {
public boolean isJsonRequest() {
return true;
}
// / Process the request, not the authentication layer
public boolean doJSON(Map<String, Object> outputData, Map<String, Object> templateData) throws Exception {
return rb.servletCall("", this, outputData);
}
}
protected static EmbeddedServlet tomcat = null;
private static List<String> _oids = null;
private static MetaTable mtObj = null;
private static MetaTableApiBuilder mtApi = null;
private static RESTBuilder rb = null;
protected static int port = 15000;
protected static boolean portAvailableCalled = false;
private static MetaTable implementationConstructor() {
return (new JStruct()).getMetaTable("test");
}
private static void populateMetaTableDummyData(int min, int max) {
Random rnd = new Random();
int _max = rnd.nextInt(max);
_max = _max > min ? _max : min;
_oids = new ArrayList<String>();
for (int i = 0; i < _max; ++i) {
String oid = GUID.base58();
_oids.add(oid);
Map<String, Object> innerObj = new HashMap<String, Object>();
innerObj.put("_oid", oid);
innerObj.put("_name", "name" + i);
innerObj.put("_age", "age" + i);
mtObj.append(oid, innerObj).saveAll();
}
}
@BeforeClass
public static void serverSetUp() throws LifecycleException, JStackException {
if (!portAvailableCalled) {
while (!portAvailableCalled) {
available(port);
if (!portAvailableCalled) {
port += 100;
}
}
}
mtObj = implementationConstructor();
mtObj.systemSetup();
populateMetaTableDummyData(3, 3);
mtApi = new MetaTableApiBuilder(mtObj);
rb = new RESTBuilder();
mtApi.setupRESTBuilder(rb, "/meta-test/");
if (tomcat == null) {
File webInfFile = new File("./test-files/tmp/WEB-INF");
if (webInfFile.exists()) {
for (File file : webInfFile.listFiles()) {
                    file.delete(); // to accommodate certain people who do not
// use command line
}
}
webInfFile.mkdir();
File context = new File("./test-files/tmp");
tomcat = new EmbeddedServlet("", context)
.withServlet("/api/*", "meta-table-test", new MetaTableApiServlet()).withPort(port);
tomcat.start();
// tomcat.awaitServer();
}
}
@AfterClass
public static void serverTearDown() throws LifecycleException {
if (mtObj != null) {
mtObj.systemTeardown();
}
mtObj = null;
if (tomcat != null) {
tomcat.stop();
}
tomcat = null;
}
// @Test
// public void awaitServer() {
// tomcat.awaitServer();
// }
RequestHttp requester;
ResponseHttp response;
Map<String, Object> responseMap;
@Test
public void list_POST_test() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/list";
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
// Map<String, String[]> headersMap = new HashMap<String, String[]>();
paramsMap.put("headers", new String[] { "[\"_oid\"]" });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
Map<String, Object> resMap = response.toMap();
@SuppressWarnings("unchecked")
List<List<String>> dataList = (List<List<String>>) resMap.get("data");
assertNotNull(dataList);
List<String> convList = new ArrayList<String>();
for (List<String> innerList : dataList) {
convList.addAll(innerList);
}
boolean contains = convList.containsAll(_oids);
assertTrue(contains);
}
@Test
public void meta_GET_test() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/get";
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
// Map<String, String[]> headersMap = new HashMap<String, String[]>();
response = RequestHttp.get(path, paramsMap, null, null);
if (response.statusCode() == 404) {
System.out.println("RESPONSE STATUS: " + 404);
}
assertNotNull(response);
// Map<String, String[]> resHeaders = response.headersMap();
}
@Test
public void meta_POST_test_delta() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/post";
String jsonString = "";
Map<String, Object> deltaObj = new HashMap<String, Object>();
deltaObj.put("_name", "DeltaReplacedName");
deltaObj.put("_age", "DeltaReplacedAge");
jsonString = ConvertJSON.fromMap(deltaObj);
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
paramsMap.put("updateMode", new String[] { "delta" });
paramsMap.put("meta", new String[] { jsonString });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
Map<String, Object> newMetaObj = new HashMap<String, Object>();
newMetaObj.put("_name", "NewMetaObjectName");
newMetaObj.put("_age", "NewMetaObjectAge");
}
@Test
public void meta_POST_test_full() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/post";
String jsonString = "";
Map<String, Object> fullObj = new HashMap<String, Object>();
fullObj.put("_name", "FullReplacedName");
jsonString = ConvertJSON.fromMap(fullObj);
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(1) });
paramsMap.put("updateMode", new String[] { "full" });
paramsMap.put("meta", new String[] { jsonString });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
}
@Test
public void meta_POST_test_new() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/post";
String jsonString = "";
Map<String, Object> newMetaObj = new HashMap<String, Object>();
newMetaObj.put("_name", "NewMetaObjectName");
newMetaObj.put("_age", "NewMetaObjectAge");
jsonString = ConvertJSON.fromMap(newMetaObj);
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { "new" });
paramsMap.put("meta", new String[] { jsonString });
response = RequestHttp.post(path, paramsMap, null, null);
assertNotNull(response);
}
@Test
public void meta_DELETE_test() {
String path = "http://127.0.0.1:" + port + "/api/meta-test/meta";
String getPath = "http://127.0.0.1:" + port + "/api/meta-test/meta";
Map<String, Object> respMap = null;
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("_oid", new String[] { _oids.get(0) });
// first get user
response = RequestHttp.get(getPath, paramsMap, null, null);
assertNotNull(respMap = response.toMap());
assertNotNull(respMap.get("meta"));
// then delete
response = RequestHttp.delete(path, paramsMap, null, null);
assertNotNull(respMap = response.toMap());
// then check again
response = RequestHttp.get(getPath, paramsMap, null, null);
assertNotNull(respMap = response.toMap());
assertNull(respMap.get("meta"));
}
@SuppressWarnings("try")
private static boolean available(int port) {
if (!portAvailableCalled) {
try (Socket ignored = new Socket("localhost", port)) {
return false;
} catch (IOException ignored) {
portAvailableCalled = true;
//System.out.println(" PORT : " + port);
return true;
}
}
return true;
}
@Test
public void list_GET_and_POST_AllBlankTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
}
@Test
public void list_GET_and_POSTTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_age" });
paramsMap.put("searchValue", new String[] { "abc", "def", "xyz" });
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
}
@Test
public void list_GET_and_POSTOrderByTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_age" });
paramsMap.put("searchValue", new String[] { "xyz" });
paramsMap.put("orderBy", new String[] { "_name" });
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
assertNotNull(responseMap.get("draw"));
assertNotNull(responseMap.get("headers"));
}
@Test
public void list_GET_and_POSTQueryTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_oid" });
paramsMap.put("searchValue", new String[] { "xyz", "123456" });
paramsMap.put("queryArgs", new String[] { "25", "65" });
paramsMap.put("orderBy", new String[] { "_name" });
paramsMap.put("query", new String[] { "_age > ? AND _age < ? " });
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/list", paramsMap);
assertNotNull(response);
assertNotNull(responseMap = response.toMap());
assertNull(responseMap.get("error"));
assertNotNull(responseMap.get("data"));
assertNotNull(responseMap.get("draw"));
assertNotNull(responseMap.get("headers"));
}
@Test
public void csv_exportAllParamest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
paramsMap.put("caseSensitive", new String[] { "true" });
paramsMap.put("queryColumns", new String[] { "_name", "_oid" });
paramsMap.put("searchValue", new String[] { "xyz", "123456" });
paramsMap.put("queryArgs", new String[] { "25", "65" });
paramsMap.put("orderBy", new String[] { "_name" });
paramsMap.put("query", new String[] { "_age > ? AND _age < ? " });
response = RequestHttp.get("http://127.0.0.1:" + port + "/api/meta-test/csv", paramsMap);
assertNotNull(response);
}
@Test
public void csv_exportRESTTest() {
Map<String, String[]> paramsMap = new HashMap<String, String[]>();
response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/csv", paramsMap);
assertNotNull(response);
}
}
| commit_message
| src/picodedTests/RESTBuilder/templates/MetaTableApiBuilderTomcat_test.java | commit_message | <ide><path>rc/picodedTests/RESTBuilder/templates/MetaTableApiBuilderTomcat_test.java
<ide> package picodedTests.RESTBuilder.templates;
<ide>
<del>import static org.junit.Assert.assertNotNull;
<del>import static org.junit.Assert.assertNull;
<del>import static org.junit.Assert.assertTrue;
<add>import static org.junit.Assert.*;
<ide>
<ide> import java.io.File;
<ide> import java.io.IOException;
<ide> import java.net.Socket;
<ide> import java.util.ArrayList;
<ide> import java.util.HashMap;
<add>import java.util.LinkedHashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Random;
<ide>
<ide> import picoded.JStack.JStackException;
<ide> import picoded.JStruct.JStruct;
<add>import picoded.JStruct.MetaObject;
<ide> import picoded.JStruct.MetaTable;
<ide> import picoded.RESTBuilder.RESTBuilder;
<ide> import picoded.RESTBuilder.templates.MetaTableApiBuilder;
<ide> assertNotNull(response);
<ide> }
<ide>
<add> @Test
<add> public void meta_POSTInvalidTest() {
<add> Map<String, String[]> paramsMap = new HashMap<String, String[]>();
<add> paramsMap.put("_oid", new String[] { _oids.get(0) });
<add> response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/meta", paramsMap);
<add> assertNotNull(response);
<add> assertNotNull(responseMap = response.toMap());
<add> assertEquals("No meta object was found in the request", responseMap.get("error"));
<add> }
<add>
<add> @Test
<add> public void meta_POSTTest() {
<add> Map<String, String[]> paramsMap = new HashMap<String, String[]>();
<add> paramsMap.put("_oid", new String[] { _oids.get(0) });
<add> paramsMap.put("meta", new String[] { "{\"oop\":\"java\",\"foo\":\"class\"}" });
<add> response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/meta", paramsMap);
<add> assertNotNull(response);
<add> assertNotNull(responseMap = response.toMap());
<add> assertNull("No meta object was found in the request", responseMap.get("error"));
<add> LinkedHashMap updatedMObj = (LinkedHashMap)responseMap.get("updateMeta");
<add> assertEquals("java", updatedMObj.get("oop"));
<add> }
<add>
<add> @Test
<add> public void meta_POSTAllParamTest() {
<add> Map<String, String[]> paramsMap = new HashMap<String, String[]>();
<add> paramsMap.put("_oid", new String[] { _oids.get(0) });
<add> paramsMap.put("meta", new String[] { "{\"oop\":\"java\",\"foo\":\"class\"}" });
<add> paramsMap.put("updateMode", new String[] { "updateMode" });
<add> response = RequestHttp.post("http://127.0.0.1:" + port + "/api/meta-test/meta", paramsMap);
<add> assertNotNull(response);
<add> assertNotNull(responseMap = response.toMap());
<add> assertNull("No meta object was found in the request", responseMap.get("error"));
<add> LinkedHashMap updatedMObj = (LinkedHashMap)responseMap.get("updateMeta");
<add> assertEquals("java", updatedMObj.get("oop"));
<add> }
<add>
<ide> } |
|
Java | epl-1.0 | 461b5bf783e157f72cba556f2bf813f4e4a39830 | 0 | oxmcvusd/eclipse-integration-gradle,oxmcvusd/eclipse-integration-gradle,oxmcvusd/eclipse-integration-gradle | /*******************************************************************************
* Copyright (c) 2012 VMWare, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* VMWare, Inc. - initial API and implementation
*******************************************************************************/
package org.springsource.ide.eclipse.gradle.core.wtp;
import static org.eclipse.jst.j2ee.classpathdep.ClasspathDependencyUtil.getDefaultRuntimePath;
import static org.eclipse.jst.j2ee.classpathdep.ClasspathDependencyUtil.isClassFolderEntry;
import static org.eclipse.jst.j2ee.classpathdep.ClasspathDependencyUtil.modifyDependencyPath;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.IClasspathAttribute;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jst.j2ee.classpathdep.UpdateClasspathAttributeUtil;
import org.eclipse.jst.j2ee.project.JavaEEProjectUtilities;
import org.eclipse.wst.common.componentcore.ComponentCore;
import org.eclipse.wst.common.componentcore.internal.util.IModuleConstants;
import org.eclipse.wst.common.componentcore.resources.IVirtualComponent;
import org.eclipse.wst.common.project.facet.core.IFacetedProject;
import org.eclipse.wst.common.project.facet.core.IProjectFacet;
import org.eclipse.wst.common.project.facet.core.IProjectFacetVersion;
import org.eclipse.wst.common.project.facet.core.ProjectFacetsManager;
import org.springsource.ide.eclipse.gradle.core.ClassPath;
import org.springsource.ide.eclipse.gradle.core.GradleCore;
import org.springsource.ide.eclipse.gradle.core.GradleProject;
import org.springsource.ide.eclipse.gradle.core.actions.RefreshDependenciesActionCore;
import org.springsource.ide.eclipse.gradle.core.util.WorkspaceUtil;
/**
 * WTPUtil utility methods that have a 'dynamic' implementation. If WTP plugins are installed in
* Eclipse, then they provide a 'real' implementation calling on WTP methods and classes.
* Otherwise a 'null' implementation is provided that doesn't do anything.
*
* @author Kris De Volder
*/
@SuppressWarnings("restriction")
public class WTPUtil {
public static final String JST_J2EE_WEB_CONTAINER = "org.eclipse.jst.j2ee.internal.web.container";
/**
* @return true if WTP is installed
*/
public static boolean isInstalled() {
return implementation.isInstalled();
}
/**
* Returns true if the given project is a WTP project, and WTP is installed.
* @throws CoreException
*/
public static boolean isWTPProject(IProject project) throws CoreException {
return implementation.isWTPProject(project);
}
/**
* Rewrites a raw classpath entry to add the necessary classpath attributes to add the entry
* to the deployment assembly. The classpath entry is assumed to be an entry from
* the given javaProject's classpath.
*/
public static IClasspathEntry addToDeploymentAssembly(IJavaProject jproj, IClasspathEntry e) {
return implementation.addToDeploymentAssembly(jproj, e);
}
/**
* Decides whether a given (resolved) jar dependency should be deployed and adds an extra classpath
* attribute as needed to exclude(or not) the jar.
*/
public static void excludeFromDeployment(IJavaProject javaProject, IPath jarPath, List<IClasspathAttribute> extraAttributes) {
implementation.excludeFromDeployment(javaProject, jarPath, extraAttributes);
}
/**
* Adds the 'Web Libraries' classpath container if the project is a WTP webapp project and doesn't
* already have this container on its classpath.
*/
public static void addWebLibraries(GradleProject project) {
implementation.addWebLibraries(project);
}
/**
* Refresh dependencies for all WTP projects.
*/
public static void refreshAllDependencies() {
implementation.refreshAllDependencies();
}
////////////// implementations are below //////////////////////
private interface IWTPUtil {
IClasspathEntry addToDeploymentAssembly(IJavaProject javaProject, IClasspathEntry e);
void addWebLibraries(GradleProject project);
boolean isInstalled();
void excludeFromDeployment(IJavaProject javaProject, IPath jarPath,
List<IClasspathAttribute> extraAttributes);
boolean isWTPProject(IProject project) throws CoreException;
void refreshAllDependencies();
}
private static class NullImplementation implements IWTPUtil {
public IClasspathEntry addToDeploymentAssembly(IJavaProject javaProject, IClasspathEntry e) {
return e;
}
public boolean isWTPProject(IProject project) {
return false;
}
public void excludeFromDeployment(IJavaProject javaProject,
IPath jarPath, List<IClasspathAttribute> extraAttributes) {
}
public boolean isInstalled() {
return false;
}
public void refreshAllDependencies() {
}
public void addWebLibraries(GradleProject p) {
}
}
private static class DefaultImplementation implements IWTPUtil {
public boolean isWTPProject(IProject project) {
try {
return project!=null && project.hasNature(IModuleConstants.MODULE_NATURE_ID);
} catch (CoreException e) {
GradleCore.log(e);
return false;
}
}
public IClasspathEntry addToDeploymentAssembly(IJavaProject jproj, IClasspathEntry cpeOriginal) {
if (isWTPProject(jproj.getProject())) {
//This code was based on code found in
// org.eclipse.jst.j2ee.internal.ui.AddJavaBuildPathEntriesWizardFragment.handleSelectionChanged()
final IVirtualComponent virtualComponent = ComponentCore.createComponent(jproj.getProject());
final boolean isWebApp = JavaEEProjectUtilities.isDynamicWebProject( jproj.getProject() );
IPath runtimePath = null;
if(virtualComponent == null){
runtimePath = getDefaultRuntimePath( isWebApp, isClassFolderEntry( cpeOriginal ) );
} else {
runtimePath = getDefaultRuntimePath(virtualComponent, cpeOriginal);
}
final IClasspathEntry cpeTagged = modifyDependencyPath( cpeOriginal, runtimePath );
return cpeTagged;
} else {
return cpeOriginal;
}
}
private boolean shouldExcludeFromDeploment(IJavaProject jproj, IPath jarPath) {
String jarName = jarPath.lastSegment();
if (jarName!=null && jarName.endsWith(".jar")) {
DeploymentExclusions exclusions = GradleCore.getInstance().getPreferences().getDeploymentExclusions();
return exclusions.shouldExclude(jarName);
}
return false;
}
public void excludeFromDeployment(IJavaProject jproj, IPath jarPath, List<IClasspathAttribute> extraAttributes) {
if (shouldExcludeFromDeploment(jproj, jarPath)) {
try {
extraAttributes.add(UpdateClasspathAttributeUtil.createNonDependencyAttribute());
} catch (CoreException e) {
GradleCore.log(e);
}
}
}
public boolean isInstalled() {
return true;
}
public void refreshAllDependencies() {
			//TODO: in the case where this is currently called (after changing deploy exclusions),
			// a full refresh isn't necessary. Just reinitializing the CP container without
			// rebuilding the gradle models should suffice (because we only need to update classpath attributes).
RefreshDependenciesActionCore.callOn(getAllWTPProjects());
}
private List<IProject> getAllWTPProjects() {
IProject[] projects = WorkspaceUtil.getProjects();
List<IProject> wtpProjects = new ArrayList<IProject>();
for (IProject project : projects) {
if (isWTPProject(project)) {
wtpProjects.add(project);
}
}
return wtpProjects;
}
/* (non-Javadoc)
* @see org.springsource.ide.eclipse.gradle.core.wtp.WTPUtil.IWTPUtil#addWebLibraries(org.eclipse.jdt.core.IJavaProject)
*/
public void addWebLibraries(GradleProject project) {
try {
IJavaProject jproj = project.getJavaProject();
if (isWTPProject(jproj.getProject())) {
if (isWebApp(jproj)) {
ClassPath classpath = project.getClassPath();
classpath.add(JavaCore.newContainerEntry(new Path(JST_J2EE_WEB_CONTAINER)));
classpath.setOn(jproj, new NullProgressMonitor());
}
}
} catch (CoreException e) {
GradleCore.log(e);
}
}
private boolean isWebApp(IJavaProject jproj) throws CoreException {
IFacetedProject fproj = ProjectFacetsManager.create(jproj.getProject());
if (fproj!=null) {
Set<IProjectFacetVersion> facets = fproj.getProjectFacets();
for (IProjectFacetVersion fv : facets) {
IProjectFacet f = fv.getProjectFacet();
return "jst.web".equals(f.getId());
}
}
return false;
}
}
private static final IWTPUtil implementation = createImplementation();
private static IWTPUtil createImplementation() {
try {
Class.forName("org.eclipse.wst.common.componentcore.ComponentCore");
Class.forName("org.eclipse.jst.j2ee.project.facet.IJ2EEFacetConstants");
return new DefaultImplementation();
} catch (ClassNotFoundException e) {
//Most likely reason for the exception is that WTP is not installed (the WTP plugins are declared as
//optional dependencies).
GradleCore.log(e);
return new NullImplementation();
}
}
}
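// Editor's note: an illustrative sketch, not part of the original file or commit. The
// createImplementation() method above guards the WTP-backed implementation with Class.forName
// probes and falls back to NullImplementation when a probe fails; the commit recorded below
// adds a second probe so that a partially installed WTP also triggers the fallback. The same
// guard pattern in isolation (the class name and any probe targets passed in are only examples):
class OptionalDependencyProbeExample {
	/** Returns true only if every named class can be loaded by the current classloader. */
	static boolean allPresent(String... classNames) {
		for (String name : classNames) {
			try {
				Class.forName(name);
			} catch (ClassNotFoundException e) {
				return false; // one missing class is enough to disable the optional feature
			}
		}
		return true;
	}
}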
| org.springsource.ide.eclipse.gradle.core/src/org/springsource/ide/eclipse/gradle/core/wtp/WTPUtil.java | /*******************************************************************************
* Copyright (c) 2012 VMWare, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* VMWare, Inc. - initial API and implementation
*******************************************************************************/
package org.springsource.ide.eclipse.gradle.core.wtp;
import static org.eclipse.jst.j2ee.classpathdep.ClasspathDependencyUtil.getDefaultRuntimePath;
import static org.eclipse.jst.j2ee.classpathdep.ClasspathDependencyUtil.isClassFolderEntry;
import static org.eclipse.jst.j2ee.classpathdep.ClasspathDependencyUtil.modifyDependencyPath;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.IClasspathAttribute;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jst.j2ee.classpathdep.UpdateClasspathAttributeUtil;
import org.eclipse.jst.j2ee.project.JavaEEProjectUtilities;
import org.eclipse.wst.common.componentcore.ComponentCore;
import org.eclipse.wst.common.componentcore.internal.util.IModuleConstants;
import org.eclipse.wst.common.componentcore.resources.IVirtualComponent;
import org.eclipse.wst.common.project.facet.core.IFacetedProject;
import org.eclipse.wst.common.project.facet.core.IProjectFacet;
import org.eclipse.wst.common.project.facet.core.IProjectFacetVersion;
import org.eclipse.wst.common.project.facet.core.ProjectFacetsManager;
import org.springsource.ide.eclipse.gradle.core.ClassPath;
import org.springsource.ide.eclipse.gradle.core.GradleCore;
import org.springsource.ide.eclipse.gradle.core.GradleProject;
import org.springsource.ide.eclipse.gradle.core.actions.RefreshDependenciesActionCore;
import org.springsource.ide.eclipse.gradle.core.util.WorkspaceUtil;
import org.eclipse.jst.j2ee.project.facet.IJ2EEFacetConstants;
/**
 * WTPUtil utility methods that have a 'dynamic' implementation. If WTP plugins are installed in
* Eclipse, then they provide a 'real' implementation calling on WTP methods and classes.
* Otherwise a 'null' implementation is provided that doesn't do anything.
*
* @author Kris De Volder
*/
@SuppressWarnings("restriction")
public class WTPUtil {
public static final String JST_J2EE_WEB_CONTAINER = "org.eclipse.jst.j2ee.internal.web.container";
/**
* @return true if WTP is installed
*/
public static boolean isInstalled() {
return implementation.isInstalled();
}
/**
* Returns true if the given project is a WTP project, and WTP is installed.
* @throws CoreException
*/
public static boolean isWTPProject(IProject project) throws CoreException {
return implementation.isWTPProject(project);
}
/**
* Rewrites a raw classpath entry to add the necessary classpath attributes to add the entry
* to the deployment assembly. The classpath entry is assumed to be an entry from
* the given javaProject's classpath.
*/
public static IClasspathEntry addToDeploymentAssembly(IJavaProject jproj, IClasspathEntry e) {
return implementation.addToDeploymentAssembly(jproj, e);
}
/**
* Decides whether a given (resolved) jar dependency should be deployed and adds an extra classpath
* attribute as needed to exclude(or not) the jar.
*/
public static void excludeFromDeployment(IJavaProject javaProject, IPath jarPath, List<IClasspathAttribute> extraAttributes) {
implementation.excludeFromDeployment(javaProject, jarPath, extraAttributes);
}
/**
* Adds the 'Web Libraries' classpath container if the project is a WTP webapp project and doesn't
* already have this container on its classpath.
*/
public static void addWebLibraries(GradleProject project) {
implementation.addWebLibraries(project);
}
/**
* Refresh dependencies for all WTP projects.
*/
public static void refreshAllDependencies() {
implementation.refreshAllDependencies();
}
////////////// implementations are below //////////////////////
private interface IWTPUtil {
IClasspathEntry addToDeploymentAssembly(IJavaProject javaProject, IClasspathEntry e);
void addWebLibraries(GradleProject project);
boolean isInstalled();
void excludeFromDeployment(IJavaProject javaProject, IPath jarPath,
List<IClasspathAttribute> extraAttributes);
boolean isWTPProject(IProject project) throws CoreException;
void refreshAllDependencies();
}
private static class NullImplementation implements IWTPUtil {
public IClasspathEntry addToDeploymentAssembly(IJavaProject javaProject, IClasspathEntry e) {
return e;
}
public boolean isWTPProject(IProject project) {
return false;
}
public void excludeFromDeployment(IJavaProject javaProject,
IPath jarPath, List<IClasspathAttribute> extraAttributes) {
}
public boolean isInstalled() {
return false;
}
public void refreshAllDependencies() {
}
public void addWebLibraries(GradleProject p) {
}
}
private static class DefaultImplementation implements IWTPUtil {
public boolean isWTPProject(IProject project) {
try {
return project!=null && project.hasNature(IModuleConstants.MODULE_NATURE_ID);
} catch (CoreException e) {
GradleCore.log(e);
return false;
}
}
public IClasspathEntry addToDeploymentAssembly(IJavaProject jproj, IClasspathEntry cpeOriginal) {
if (isWTPProject(jproj.getProject())) {
//This code was based on code found in
// org.eclipse.jst.j2ee.internal.ui.AddJavaBuildPathEntriesWizardFragment.handleSelectionChanged()
final IVirtualComponent virtualComponent = ComponentCore.createComponent(jproj.getProject());
final boolean isWebApp = JavaEEProjectUtilities.isDynamicWebProject( jproj.getProject() );
IPath runtimePath = null;
if(virtualComponent == null){
runtimePath = getDefaultRuntimePath( isWebApp, isClassFolderEntry( cpeOriginal ) );
} else {
runtimePath = getDefaultRuntimePath(virtualComponent, cpeOriginal);
}
final IClasspathEntry cpeTagged = modifyDependencyPath( cpeOriginal, runtimePath );
return cpeTagged;
} else {
return cpeOriginal;
}
}
private boolean shouldExcludeFromDeploment(IJavaProject jproj, IPath jarPath) {
String jarName = jarPath.lastSegment();
if (jarName!=null && jarName.endsWith(".jar")) {
DeploymentExclusions exclusions = GradleCore.getInstance().getPreferences().getDeploymentExclusions();
return exclusions.shouldExclude(jarName);
}
return false;
}
public void excludeFromDeployment(IJavaProject jproj, IPath jarPath, List<IClasspathAttribute> extraAttributes) {
if (shouldExcludeFromDeploment(jproj, jarPath)) {
try {
extraAttributes.add(UpdateClasspathAttributeUtil.createNonDependencyAttribute());
} catch (CoreException e) {
GradleCore.log(e);
}
}
}
public boolean isInstalled() {
return true;
}
public void refreshAllDependencies() {
			//TODO: in the case where this is currently called (after changing deploy exclusions),
			// a full refresh isn't necessary. Just reinitializing the CP container without
			// rebuilding the gradle models should suffice (because we only need to update classpath attributes).
RefreshDependenciesActionCore.callOn(getAllWTPProjects());
}
private List<IProject> getAllWTPProjects() {
IProject[] projects = WorkspaceUtil.getProjects();
List<IProject> wtpProjects = new ArrayList<IProject>();
for (IProject project : projects) {
if (isWTPProject(project)) {
wtpProjects.add(project);
}
}
return wtpProjects;
}
/* (non-Javadoc)
* @see org.springsource.ide.eclipse.gradle.core.wtp.WTPUtil.IWTPUtil#addWebLibraries(org.eclipse.jdt.core.IJavaProject)
*/
public void addWebLibraries(GradleProject project) {
try {
IJavaProject jproj = project.getJavaProject();
if (isWTPProject(jproj.getProject())) {
if (isWebApp(jproj)) {
ClassPath classpath = project.getClassPath();
classpath.add(JavaCore.newContainerEntry(new Path(JST_J2EE_WEB_CONTAINER)));
classpath.setOn(jproj, new NullProgressMonitor());
}
}
} catch (CoreException e) {
GradleCore.log(e);
}
}
private boolean isWebApp(IJavaProject jproj) throws CoreException {
IFacetedProject fproj = ProjectFacetsManager.create(jproj.getProject());
if (fproj!=null) {
Set<IProjectFacetVersion> facets = fproj.getProjectFacets();
for (IProjectFacetVersion fv : facets) {
IProjectFacet f = fv.getProjectFacet();
return "jst.web".equals(f.getId());
}
}
return false;
}
}
private static final IWTPUtil implementation = createImplementation();
private static IWTPUtil createImplementation() {
try {
Class.forName("org.eclipse.wst.common.componentcore.ComponentCore");
return new DefaultImplementation();
} catch (ClassNotFoundException e) {
//Most likely reason for the exception is that WTP is not installed (the WTP plugins are declared as
//optional dependencies).
GradleCore.log(e);
return new NullImplementation();
}
}
}
| Make WTPUtil class more likely to fail when wtp is only partially installed. | org.springsource.ide.eclipse.gradle.core/src/org/springsource/ide/eclipse/gradle/core/wtp/WTPUtil.java | Make WTPUtil class more likely to fail when wtp is only partially installed. | <ide><path>rg.springsource.ide.eclipse.gradle.core/src/org/springsource/ide/eclipse/gradle/core/wtp/WTPUtil.java
<ide> import org.springsource.ide.eclipse.gradle.core.actions.RefreshDependenciesActionCore;
<ide> import org.springsource.ide.eclipse.gradle.core.util.WorkspaceUtil;
<ide>
<del>import org.eclipse.jst.j2ee.project.facet.IJ2EEFacetConstants;
<del>
<del>
<ide> /**
<ide> * WTPUtilit methods, that have a 'dynamic' implementation. If WTP plugins are installed in
<ide> * Eclipse, then they provide a 'real' implementation calling on WTP methods and classes.
<ide> private static IWTPUtil createImplementation() {
<ide> try {
<ide> Class.forName("org.eclipse.wst.common.componentcore.ComponentCore");
<add> Class.forName("org.eclipse.jst.j2ee.project.facet.IJ2EEFacetConstants");
<ide> return new DefaultImplementation();
<ide> } catch (ClassNotFoundException e) {
<ide> //Most likely reason for the exception is that WTP is not installed (the WTP plugins are declared as |
|
Java | apache-2.0 | 04b31f07e1a1518c1a925a9209909edf58304b9f | 0 | fan-wenjie/EasyPR-Java,zuefir/EasyPR-Java | package org.easypr.core;
import org.bytedeco.javacpp.BytePointer;
import org.easypr.util.Convert;
import java.util.Vector;
import static org.bytedeco.javacpp.opencv_core.*;
import static org.bytedeco.javacpp.opencv_imgproc.*;
import static org.bytedeco.javacpp.opencv_highgui.*;
/*
* Created by fanwenjie
* @version 1.1
*/
public class CharsSegment {
final static float DEFAULT_BLUEPERCEMT = 0.3f;
final static float DEFAULT_WHITEPERCEMT = 0.1f;
private int liuDingSize;
private int theMatWidth;
private int colorThreshold;
private float bluePercent;
private float whitePercent;
private boolean isDebug;
public static final int DEFAULT_LIUDING_SIZE = 7;
public static final int DEFAULT_MAT_WIDTH = 136;
public static final int DEFAULT_COLORTHRESHOLD = 150;
//! 是否开启调试模式常量,默认0代表关闭
public static final boolean DEFAULT_DEBUG = false;
//! preprocessChar所用常量
public static final int CHAR_SIZE = 20;
public static final int HORIZONTAL = 1;
public static final int VERTICAL = 0;
public CharsSegment() {
this.liuDingSize = DEFAULT_LIUDING_SIZE;
this.theMatWidth = DEFAULT_MAT_WIDTH;
//!车牌颜色判断参数
this.colorThreshold = DEFAULT_COLORTHRESHOLD;
this.bluePercent = DEFAULT_BLUEPERCEMT;
this.whitePercent = DEFAULT_WHITEPERCEMT;
this.isDebug = DEFAULT_DEBUG;
}
//! 字符分割
public int charsSegment(Mat input, Vector<Mat> resultVec) {
if (input.data().isNull())
return -3;
//判断车牌颜色以此确认threshold方法
int plateType = getPlateType(input);
cvtColor(input, input, CV_RGB2GRAY);
//Threshold input image
Mat img_threshold = new Mat();
if (1 == plateType)
threshold(input, img_threshold, 10, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
else
threshold(input, img_threshold, 10, 255, CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
if (this.isDebug) {
String str = "image/tmp/debug_char_threshold.jpg";
imwrite(str, img_threshold);
}
//去除车牌上方的柳钉以及下方的横线等干扰
clearLiuDing(img_threshold);
if (this.isDebug) {
String str = "res/image/tmp/debug_char_clearLiuDing.jpg";
imwrite(str, img_threshold);
}
Mat img_contours = new Mat();
img_threshold.copyTo(img_contours);
MatVector contours = new MatVector();
findContours(img_contours,
contours, // a vector of contours
CV_RETR_EXTERNAL, // retrieve the external contours
CV_CHAIN_APPROX_NONE); // all pixels of each contours
        //Start to iterate over each contour found
        //Remove patches that are not inside the limits of aspect ratio and area.
//将不符合特定尺寸的图块排除出去
Vector<Rect> vecRect = new Vector<Rect>();
for (int i = 0; i < contours.size(); ++i) {
Rect mr = boundingRect(contours.get(i));
if (verifySizes(new Mat(img_threshold, mr)))
vecRect.add(mr);
}
if (vecRect.size() == 0)
return -3;
Vector<Rect> sortedRect = new Vector<Rect>();
//对符合尺寸的图块按照从左到右进行排序
SortRect(vecRect, sortedRect);
int specIndex = 0;
//获得指示城市的特定Rect,如苏A的"A"
specIndex = GetSpecificRect(sortedRect);
if (this.isDebug) {
if (specIndex < sortedRect.size()) {
Mat specMat = new Mat(img_threshold, sortedRect.get(specIndex));
String str = "res/image/tmp/debug_specMat.jpg";
imwrite(str, specMat);
}
}
//根据特定Rect向左反推出中文字符
//这样做的主要原因是根据findContours方法很难捕捉到中文字符的准确Rect,因此仅能
        //通过特定算法来指定
Rect chineseRect = new Rect();
if (specIndex < sortedRect.size())
chineseRect = GetChineseRect(sortedRect.get(specIndex));
else
return -3;
if (this.isDebug) {
Mat chineseMat = new Mat(img_threshold, chineseRect);
String str = "res/image/tmp/debug_chineseMat.jpg";
imwrite(str, chineseMat);
}
//新建一个全新的排序Rect
//将中文字符Rect第一个加进来,因为它肯定是最左边的
//其余的Rect只按照顺序去6个,车牌只可能是7个字符!这样可以避免阴影导致的“1”字符
Vector<Rect> newSortedRect = new Vector<Rect>();
newSortedRect.add(chineseRect);
RebuildRect(sortedRect, newSortedRect, specIndex);
if (newSortedRect.size() == 0)
return -3;
for (int i = 0; i < newSortedRect.size(); i++) {
Rect mr = newSortedRect.get(i);
Mat auxRoi = new Mat(img_threshold, mr);
auxRoi = preprocessChar(auxRoi);
if (this.isDebug) {
String str = "image/tmp/debug_char_auxRoi_" + Integer.valueOf(i).toString() + ".jpg";
imwrite(str, auxRoi);
}
resultVec.add(auxRoi);
}
return 0;
}
//! 字符尺寸验证
public Boolean verifySizes(Mat r) {
float aspect = 45.0f / 90.0f;
float charAspect = (float) r.cols() / (float) r.rows();
float error = 0.7f;
float minHeight = 10f;
float maxHeight = 35f;
//We have a different aspect ratio for number 1, and it can be ~0.2
float minAspect = 0.05f;
float maxAspect = aspect + aspect * error;
//area of pixels
float area = countNonZero(r);
//bb area
float bbArea = r.cols() * r.rows();
//% of pixel in area
float percPixels = area / bbArea;
return percPixels <= 1 && charAspect > minAspect && charAspect < maxAspect && r.rows() >= minHeight && r.rows() < maxHeight;
}
//! 字符预处理
public Mat preprocessChar(Mat in) {
//Remap image
int h = in.rows();
int w = in.cols();
int charSize = CHAR_SIZE; //统一每个字符的大小
Mat transformMat = Mat.eye(2, 3, CV_32F).asMat();
int m = (w > h) ? w : h;
transformMat.ptr(0,2).put(Convert.getBytes(((m-w) / 2f)));
transformMat.ptr(1,2).put(Convert.getBytes((m-h)/2f));
Mat warpImage = new Mat(m, m, in.type());
warpAffine(in, warpImage, transformMat, warpImage.size(), INTER_LINEAR, BORDER_CONSTANT, new Scalar(0));
Mat out = new Mat();
resize(warpImage, out, new Size(charSize, charSize));
return out;
}
/*
//! 生成直方图
public Mat ProjectedHistogram(Mat img, int t) {
return null;
}
//! 生成字符的特定特征
public Mat features(Mat in, int sizeData) {
return null;
}*/
//! 直方图均衡,为判断车牌颜色做准备
public Mat histeq(Mat in) {
Mat out = new Mat(in.size(), in.type());
if (in.channels() == 3) {
Mat hsv = new Mat();
MatVector hsvSplit = new MatVector();
cvtColor(in, hsv, CV_BGR2HSV);
split(hsv, hsvSplit);
equalizeHist(hsvSplit.get(2), hsvSplit.get(2));
merge(hsvSplit, hsv);
cvtColor(hsv, out, CV_HSV2BGR);
} else if (in.channels() == 1) {
equalizeHist(in, out);
}
return out;
}
//! 获得车牌颜色
public int getPlateType(Mat input) {
Mat img = new Mat();
input.copyTo(img);
img = histeq(img);
double countBlue = 0;
double countWhite = 0;
int nums = img.rows() * img.cols();
for (int i = 0; i < img.rows(); i++) {
for (int j = 0; j < img.cols(); j++) {
BytePointer pointer = img.ptr(i, j);
int blue = pointer.get(0) & 0xFF;
int green = pointer.get(1) & 0xFF;
int red = pointer.get(2) & 0xFF;
if (blue > this.colorThreshold && green > 10 && red > 10)
countBlue++;
if (blue > this.colorThreshold && green > this.colorThreshold && red > this.colorThreshold)
countWhite++;
}
}
double percentBlue = countBlue / nums;
double percentWhite = countWhite / nums;
if (percentBlue - this.bluePercent > 0 && percentWhite - this.whitePercent > 0)
return 1;
else
return 2;
}
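    // Editor's note: an illustrative helper, not part of the original file. getPlateType()
    // above counts "blueish" and "whiteish" pixels over the whole image and then compares the
    // two ratios against bluePercent and whitePercent; the final decision step on its own,
    // written as a pure function (the parameter names are made up):
    static int plateTypeFromCounts(double countBlue, double countWhite, int totalPixels,
                                   float bluePercent, float whitePercent) {
        double percentBlue = countBlue / totalPixels;
        double percentWhite = countWhite / totalPixels;
        // 1 = blue plate with enough white character pixels, 2 = everything else (same rule as above)
        return (percentBlue - bluePercent > 0 && percentWhite - whitePercent > 0) ? 1 : 2;
    }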
//! 去除影响字符识别的柳钉
public Mat clearLiuDing(Mat img) {
final int x = this.liuDingSize;
Mat jump = Mat.zeros(1, img.rows(), CV_32F).asMat();
for (int i = 0; i < img.rows(); i++) {
int jumpCount = 0;
for (int j = 0; j < img.cols() - 1; j++) {
if (img.ptr(i, j).get() != img.ptr(i, j + 1).get())
jumpCount++;
}
jump.ptr(i).put(Convert.getBytes((float)jumpCount));
}
for (int i = 0; i < img.rows(); i++) {
if (Convert.toFloat(jump.ptr(i))<=x){
for (int j = 0; j < img.cols(); j++) {
img.ptr(i, j).put((byte) 0);
}
}
}
return img;
}
//! 根据特殊车牌来构造猜测中文字符的位置和大小
public Rect GetChineseRect(final Rect rectSpe) {
int height = rectSpe.height();
float newwidth = rectSpe.width() * 1.15f;
int x = rectSpe.x();
int y = rectSpe.y();
int newx = x - (int) (newwidth * 1.15);
newx = newx > 0 ? newx : 0;
Rect a = new Rect(newx, y, (int) newwidth, height);
return a;
}
//! 找出指示城市的字符的Rect,例如苏A7003X,就是A的位置
public int GetSpecificRect(final Vector<Rect> vecRect) {
Vector<Integer> xpositions = new Vector<Integer>();
int maxHeight = 0;
int maxWidth = 0;
for (int i = 0; i < vecRect.size(); i++) {
xpositions.add(vecRect.get(i).x());
if (vecRect.get(i).height() > maxHeight) {
maxHeight = vecRect.get(i).height();
}
if (vecRect.get(i).width() > maxWidth) {
maxWidth = vecRect.get(i).width();
}
}
int specIndex = 0;
for (int i = 0; i < vecRect.size(); i++) {
Rect mr = vecRect.get(i);
int midx = mr.x() + mr.width() / 2;
//如果一个字符有一定的大小,并且在整个车牌的1/7到2/7之间,则是我们要找的特殊车牌
if ((mr.width() > maxWidth * 0.8 || mr.height() > maxHeight * 0.8) &&
(midx < this.theMatWidth * 2 / 7 && midx > this.theMatWidth / 7)) {
specIndex = i;
}
}
return specIndex;
}
//! 这个函数做两个事情
// 1.把特殊字符Rect左边的全部Rect去掉,后面再重建中文字符的位置。
// 2.从特殊字符Rect开始,依次选择6个Rect,多余的舍去。
public int RebuildRect(final Vector<Rect> vecRect, Vector<Rect> outRect, int specIndex) {
//最大只能有7个Rect,减去中文的就只有6个Rect
int count = 6;
for (int i = 0; i < vecRect.size(); i++) {
//将特殊字符左边的Rect去掉,这个可能会去掉中文Rect,不过没关系,我们后面会重建。
if (i < specIndex)
continue;
outRect.add(vecRect.get(i));
if (--count == 0)
break;
}
return 0;
}
//! 将Rect按位置从左到右进行排序
int SortRect(final Vector<Rect> vecRect, Vector<Rect> out) {
Vector<Integer> orderIndex = new Vector<Integer>();
Vector<Integer> xpositions = new Vector<Integer>();
for (int i = 0; i < vecRect.size(); ++i) {
orderIndex.add(i);
xpositions.add(vecRect.get(i).x());
}
float min = xpositions.get(0);
int minIdx;
for (int i = 0; i < xpositions.size(); ++i) {
min = xpositions.get(i);
minIdx = i;
for (int j = i; j < xpositions.size(); ++j) {
if (xpositions.get(j) < min) {
min = xpositions.get(j);
minIdx = j;
}
}
int aux_i = orderIndex.get(i);
int aux_min = orderIndex.get(minIdx);
orderIndex.remove(i);
orderIndex.insertElementAt(aux_min, i);
orderIndex.remove(minIdx);
orderIndex.insertElementAt(aux_i, minIdx);
float aux_xi = xpositions.get(i);
float aux_xmin = xpositions.get(minIdx);
xpositions.remove(i);
xpositions.insertElementAt((int) aux_xmin, i);
xpositions.remove(minIdx);
xpositions.insertElementAt((int) aux_xi, minIdx);
}
for (int i = 0; i < orderIndex.size(); i++)
out.add(vecRect.get(orderIndex.get(i)));
return 0;
}
//! 设置变量
public void setLiuDingSize(int param) {
this.liuDingSize = param;
}
public void setColorThreshold(int param) {
this.colorThreshold = param;
}
public void setBluePercent(float param) {
this.bluePercent = param;
}
public final float getBluePercent() {
return this.bluePercent;
}
public void setWhitePercent(float param) {
this.whitePercent = param;
}
public final float getWhitePercent() {
return this.whitePercent;
}
public boolean getDebug() {
return this.isDebug;
}
public void setDebug(boolean isDebug) {
this.isDebug = isDebug;
}
}
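// Editor's note: an illustrative sketch, not part of the original file. The comments in
// GetSpecificRect and RebuildRect above describe the selection rule (size checks omitted here):
// pick the block whose horizontal midpoint falls between 1/7 and 2/7 of the plate width (the
// city letter, e.g. the "A" in "苏A..."), then keep that block plus at most six blocks to its
// right. The same index arithmetic with plain ints; all midpoint values below are made up:
class PlateCharSelectionExample {
    public static void main(String[] args) {
        int plateWidth = 136; // DEFAULT_MAT_WIDTH in the class above
        int[] midpoints = {12, 30, 48, 62, 76, 90, 104, 118};
        int specIndex = 0;
        for (int i = 0; i < midpoints.length; i++) {
            if (midpoints[i] > plateWidth / 7 && midpoints[i] < plateWidth * 2 / 7) {
                specIndex = i; // candidate position of the city letter
            }
        }
        // the special block and the blocks after it, capped at six in total (as in RebuildRect)
        int kept = Math.min(6, midpoints.length - specIndex);
        System.out.println("specIndex=" + specIndex + ", blocks kept from specIndex on=" + kept);
    }
}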
| src/org/easypr/core/CharsSegment.java | package org.easypr.core;
import org.bytedeco.javacpp.BytePointer;
import org.easypr.util.Convert;
import java.util.Vector;
import static org.bytedeco.javacpp.opencv_core.*;
import static org.bytedeco.javacpp.opencv_imgproc.*;
import static org.bytedeco.javacpp.opencv_highgui.*;
/*
* Created by fanwenjie
* @version 1.1
*/
public class CharsSegment {
final static float DEFAULT_BLUEPERCEMT = 0.3f;
final static float DEFAULT_WHITEPERCEMT = 0.1f;
private int liuDingSize;
private int theMatWidth;
private int colorThreshold;
private float bluePercent;
private float whitePercent;
private boolean isDebug;
public static final int DEFAULT_LIUDING_SIZE = 7;
public static final int DEFAULT_MAT_WIDTH = 136;
public static final int DEFAULT_COLORTHRESHOLD = 150;
//! 是否开启调试模式常量,默认0代表关闭
public static final boolean DEFAULT_DEBUG = false;
//! preprocessChar所用常量
public static final int CHAR_SIZE = 20;
public static final int HORIZONTAL = 1;
public static final int VERTICAL = 0;
public CharsSegment() {
this.liuDingSize = DEFAULT_LIUDING_SIZE;
this.theMatWidth = DEFAULT_MAT_WIDTH;
//!车牌颜色判断参数
this.colorThreshold = DEFAULT_COLORTHRESHOLD;
this.bluePercent = DEFAULT_BLUEPERCEMT;
this.whitePercent = DEFAULT_WHITEPERCEMT;
this.isDebug = DEFAULT_DEBUG;
}
//! 字符分割
public int charsSegment(Mat input, Vector<Mat> resultVec) {
if (input.data().isNull())
return -3;
//判断车牌颜色以此确认threshold方法
int plateType = getPlateType(input);
cvtColor(input, input, CV_RGB2GRAY);
//Threshold input image
Mat img_threshold = new Mat();
if (1 == plateType)
threshold(input, img_threshold, 10, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
else
threshold(input, img_threshold, 10, 255, CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
if (this.isDebug) {
String str = "image/tmp/debug_char_threshold.jpg";
imwrite(str, img_threshold);
}
//去除车牌上方的柳钉以及下方的横线等干扰
clearLiuDing(img_threshold);
if (this.isDebug) {
String str = "image/tmp/debug_char_clearLiuDing.jpg";
imwrite(str, img_threshold);
}
Mat img_contours = new Mat();
img_threshold.copyTo(img_contours);
MatVector contours = new MatVector();
findContours(img_contours,
contours, // a vector of contours
CV_RETR_EXTERNAL, // retrieve the external contours
CV_CHAIN_APPROX_NONE); // all pixels of each contours
        //Start to iterate over each contour found
        //Remove patches that are not inside the limits of aspect ratio and area.
//将不符合特定尺寸的图块排除出去
Vector<Rect> vecRect = new Vector<Rect>();
for (int i = 0; i < contours.size(); ++i) {
Rect mr = boundingRect(contours.get(i));
if (verifySizes(new Mat(img_threshold, mr)))
vecRect.add(mr);
}
if (vecRect.size() == 0)
return -3;
Vector<Rect> sortedRect = new Vector<Rect>();
//对符合尺寸的图块按照从左到右进行排序
SortRect(vecRect, sortedRect);
int specIndex = 0;
//获得指示城市的特定Rect,如苏A的"A"
specIndex = GetSpecificRect(sortedRect);
if (this.isDebug) {
if (specIndex < sortedRect.size()) {
Mat specMat = new Mat(img_threshold, sortedRect.get(specIndex));
String str = "image/tmp/debug_specMat.jpg";
imwrite(str, specMat);
}
}
//根据特定Rect向左反推出中文字符
//这样做的主要原因是根据findContours方法很难捕捉到中文字符的准确Rect,因此仅能
        //通过特定算法来指定
Rect chineseRect = new Rect();
if (specIndex < sortedRect.size())
chineseRect = GetChineseRect(sortedRect.get(specIndex));
else
return -3;
if (this.isDebug) {
Mat chineseMat = new Mat(img_threshold, chineseRect);
String str = "image/tmp/debug_chineseMat.jpg";
imwrite(str, chineseMat);
}
//新建一个全新的排序Rect
//将中文字符Rect第一个加进来,因为它肯定是最左边的
//其余的Rect只按照顺序去6个,车牌只可能是7个字符!这样可以避免阴影导致的“1”字符
Vector<Rect> newSortedRect = new Vector<Rect>();
newSortedRect.add(chineseRect);
RebuildRect(sortedRect, newSortedRect, specIndex);
if (newSortedRect.size() == 0)
return -3;
for (int i = 0; i < newSortedRect.size(); i++) {
Rect mr = newSortedRect.get(i);
Mat auxRoi = new Mat(img_threshold, mr);
auxRoi = preprocessChar(auxRoi);
if (this.isDebug) {
String str = "image/tmp/debug_char_auxRoi_" + Integer.valueOf(i).toString() + ".jpg";
imwrite(str, auxRoi);
}
resultVec.add(auxRoi);
}
return 0;
}
//! 字符尺寸验证
public Boolean verifySizes(Mat r) {
float aspect = 45.0f / 90.0f;
float charAspect = (float) r.cols() / (float) r.rows();
float error = 0.7f;
float minHeight = 10f;
float maxHeight = 35f;
//We have a different aspect ratio for number 1, and it can be ~0.2
float minAspect = 0.05f;
float maxAspect = aspect + aspect * error;
//area of pixels
float area = countNonZero(r);
//bb area
float bbArea = r.cols() * r.rows();
//% of pixel in area
float percPixels = area / bbArea;
return percPixels <= 1 && charAspect > minAspect && charAspect < maxAspect && r.rows() >= minHeight && r.rows() < maxHeight;
}
//! 字符预处理
public Mat preprocessChar(Mat in) {
//Remap image
int h = in.rows();
int w = in.cols();
int charSize = CHAR_SIZE; //统一每个字符的大小
Mat transformMat = Mat.eye(2, 3, CV_32F).asMat();
int m = (w > h) ? w : h;
transformMat.ptr(0,2).put(Convert.getBytes(((m-w) / 2f)));
transformMat.ptr(1,2).put(Convert.getBytes((m-h)/2f));
Mat warpImage = new Mat(m, m, in.type());
warpAffine(in, warpImage, transformMat, warpImage.size(), INTER_LINEAR, BORDER_CONSTANT, new Scalar(0));
Mat out = new Mat();
resize(warpImage, out, new Size(charSize, charSize));
return out;
}
/*
//! 生成直方图
public Mat ProjectedHistogram(Mat img, int t) {
return null;
}
//! 生成字符的特定特征
public Mat features(Mat in, int sizeData) {
return null;
}*/
//! 直方图均衡,为判断车牌颜色做准备
public Mat histeq(Mat in) {
Mat out = new Mat(in.size(), in.type());
if (in.channels() == 3) {
Mat hsv = new Mat();
MatVector hsvSplit = new MatVector();
cvtColor(in, hsv, CV_BGR2HSV);
split(hsv, hsvSplit);
equalizeHist(hsvSplit.get(2), hsvSplit.get(2));
merge(hsvSplit, hsv);
cvtColor(hsv, out, CV_HSV2BGR);
} else if (in.channels() == 1) {
equalizeHist(in, out);
}
return out;
}
//! 获得车牌颜色
public int getPlateType(Mat input) {
Mat img = new Mat();
input.copyTo(img);
img = histeq(img);
double countBlue = 0;
double countWhite = 0;
int nums = img.rows() * img.cols();
for (int i = 0; i < img.rows(); i++) {
for (int j = 0; j < img.cols(); j++) {
BytePointer pointer = img.ptr(i, j);
int blue = pointer.get(0) & 0xFF;
int green = pointer.get(1) & 0xFF;
int red = pointer.get(2) & 0xFF;
if (blue > this.colorThreshold && green > 10 && red > 10)
countBlue++;
if (blue > this.colorThreshold && green > this.colorThreshold && red > this.colorThreshold)
countWhite++;
}
}
double percentBlue = countBlue / nums;
double percentWhite = countWhite / nums;
if (percentBlue - this.bluePercent > 0 && percentWhite - this.whitePercent > 0)
return 1;
else
return 2;
}
//! 去除影响字符识别的柳钉
public Mat clearLiuDing(Mat img) {
final int x = this.liuDingSize;
Mat jump = Mat.zeros(1, img.rows(), CV_32F).asMat();
for (int i = 0; i < img.rows(); i++) {
int jumpCount = 0;
for (int j = 0; j < img.cols() - 1; j++) {
if (img.ptr(i, j).get() != img.ptr(i, j + 1).get())
jumpCount++;
}
jump.ptr(i).put(Convert.getBytes((float)jumpCount));
}
for (int i = 0; i < img.rows(); i++) {
if (Convert.toFloat(jump.ptr(i))<=x){
for (int j = 0; j < img.cols(); j++) {
img.ptr(i, j).put((byte) 0);
}
}
}
return img;
}
//! 根据特殊车牌来构造猜测中文字符的位置和大小
public Rect GetChineseRect(final Rect rectSpe) {
int height = rectSpe.height();
float newwidth = rectSpe.width() * 1.15f;
int x = rectSpe.x();
int y = rectSpe.y();
int newx = x - (int) (newwidth * 1.15);
newx = newx > 0 ? newx : 0;
Rect a = new Rect(newx, y, (int) newwidth, height);
return a;
}
//! 找出指示城市的字符的Rect,例如苏A7003X,就是A的位置
public int GetSpecificRect(final Vector<Rect> vecRect) {
Vector<Integer> xpositions = new Vector<Integer>();
int maxHeight = 0;
int maxWidth = 0;
for (int i = 0; i < vecRect.size(); i++) {
xpositions.add(vecRect.get(i).x());
if (vecRect.get(i).height() > maxHeight) {
maxHeight = vecRect.get(i).height();
}
if (vecRect.get(i).width() > maxWidth) {
maxWidth = vecRect.get(i).width();
}
}
int specIndex = 0;
for (int i = 0; i < vecRect.size(); i++) {
Rect mr = vecRect.get(i);
int midx = mr.x() + mr.width() / 2;
//如果一个字符有一定的大小,并且在整个车牌的1/7到2/7之间,则是我们要找的特殊车牌
if ((mr.width() > maxWidth * 0.8 || mr.height() > maxHeight * 0.8) &&
(midx < this.theMatWidth * 2 / 7 && midx > this.theMatWidth / 7)) {
specIndex = i;
}
}
return specIndex;
}
//! 这个函数做两个事情
// 1.把特殊字符Rect左边的全部Rect去掉,后面再重建中文字符的位置。
// 2.从特殊字符Rect开始,依次选择6个Rect,多余的舍去。
public int RebuildRect(final Vector<Rect> vecRect, Vector<Rect> outRect, int specIndex) {
//最大只能有7个Rect,减去中文的就只有6个Rect
int count = 6;
for (int i = 0; i < vecRect.size(); i++) {
//将特殊字符左边的Rect去掉,这个可能会去掉中文Rect,不过没关系,我们后面会重建。
if (i < specIndex)
continue;
outRect.add(vecRect.get(i));
if (--count == 0)
break;
}
return 0;
}
//! 将Rect按位置从左到右进行排序
int SortRect(final Vector<Rect> vecRect, Vector<Rect> out) {
Vector<Integer> orderIndex = new Vector<Integer>();
Vector<Integer> xpositions = new Vector<Integer>();
for (int i = 0; i < vecRect.size(); ++i) {
orderIndex.add(i);
xpositions.add(vecRect.get(i).x());
}
float min = xpositions.get(0);
int minIdx;
for (int i = 0; i < xpositions.size(); ++i) {
min = xpositions.get(i);
minIdx = i;
for (int j = i; j < xpositions.size(); ++j) {
if (xpositions.get(j) < min) {
min = xpositions.get(j);
minIdx = j;
}
}
int aux_i = orderIndex.get(i);
int aux_min = orderIndex.get(minIdx);
orderIndex.remove(i);
orderIndex.insertElementAt(aux_min, i);
orderIndex.remove(minIdx);
orderIndex.insertElementAt(aux_i, minIdx);
float aux_xi = xpositions.get(i);
float aux_xmin = xpositions.get(minIdx);
xpositions.remove(i);
xpositions.insertElementAt((int) aux_xmin, i);
xpositions.remove(minIdx);
xpositions.insertElementAt((int) aux_xi, minIdx);
}
for (int i = 0; i < orderIndex.size(); i++)
out.add(vecRect.get(orderIndex.get(i)));
return 0;
}
//! 设置变量
public void setLiuDingSize(int param) {
this.liuDingSize = param;
}
public void setColorThreshold(int param) {
this.colorThreshold = param;
}
public void setBluePercent(float param) {
this.bluePercent = param;
}
public final float getBluePercent() {
return this.bluePercent;
}
public void setWhitePercent(float param) {
this.whitePercent = param;
}
public final float getWhitePercent() {
return this.whitePercent;
}
public boolean getDebug() {
return this.isDebug;
}
public void setDebug(boolean isDebug) {
this.isDebug = isDebug;
}
}
| optimus code
| src/org/easypr/core/CharsSegment.java | optimus code | <ide><path>rc/org/easypr/core/CharsSegment.java
<ide>
<ide>
<ide> if (this.isDebug) {
<del> String str = "image/tmp/debug_char_clearLiuDing.jpg";
<add> String str = "res/image/tmp/debug_char_clearLiuDing.jpg";
<ide> imwrite(str, img_threshold);
<ide> }
<ide>
<ide> if (this.isDebug) {
<ide> if (specIndex < sortedRect.size()) {
<ide> Mat specMat = new Mat(img_threshold, sortedRect.get(specIndex));
<del> String str = "image/tmp/debug_specMat.jpg";
<add> String str = "res/image/tmp/debug_specMat.jpg";
<ide> imwrite(str, specMat);
<ide> }
<ide> }
<ide>
<ide> if (this.isDebug) {
<ide> Mat chineseMat = new Mat(img_threshold, chineseRect);
<del> String str = "image/tmp/debug_chineseMat.jpg";
<add> String str = "res/image/tmp/debug_chineseMat.jpg";
<ide> imwrite(str, chineseMat);
<ide> }
<ide> |
|
Java | apache-2.0 | f8cd575498ec39fd327353376267104ac4288481 | 0 | anchela/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,apache/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.security.principal;
import org.apache.jackrabbit.api.security.principal.PrincipalIterator;
import org.apache.jackrabbit.commons.iterator.RangeIteratorAdapter;
import org.apache.jackrabbit.commons.iterator.RangeIteratorDecorator;
import javax.jcr.RangeIterator;
import java.security.Principal;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* PrincipalIteratorAdapter...
*
* TODO: move to jackrabbit-jcr-commons
*/
public class PrincipalIteratorAdapter extends RangeIteratorDecorator
implements PrincipalIterator {
/**
* Static instance of an empty {@link PrincipalIterator}.
*/
public static final PrincipalIteratorAdapter EMPTY = new PrincipalIteratorAdapter((Iterator<? extends Principal>) RangeIteratorAdapter.EMPTY);
/**
* Creates an adapter for the given {@link javax.jcr.RangeIterator}.
*
* @param iterator iterator of {@link java.security.Principal}s
*/
public PrincipalIteratorAdapter(RangeIterator iterator) {
super(iterator);
}
/**
* Creates an adapter for the given {@link java.util.Iterator} of principals.
*
* @param iterator iterator of {@link java.security.Principal}s
*/
public PrincipalIteratorAdapter(Iterator<? extends Principal> iterator) {
super(new RangeIteratorAdapter(iterator));
}
/**
* Creates an iterator for the given collection of {@code Principal}s.
*
* @param collection collection of {@link Principal} objects.
*/
public PrincipalIteratorAdapter(Collection<? extends Principal> collection) {
super(new RangeIteratorAdapter(collection));
}
    //---------------------------------------------------< PrincipalIterator >---
    /**
     * Returns the next principal.
     *
     * @return next principal.
     * @throws java.util.NoSuchElementException if there is no next principal.
     */
@Override
public Principal nextPrincipal() throws NoSuchElementException {
return (Principal) next();
}
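    // Editor's note: an illustrative usage sketch, not part of the original class. Any Iterator
    // or Collection of principals can be wrapped; the EMPTY constant above casts
    // RangeIteratorAdapter.EMPTY to Iterator<? extends Principal> so that the Iterator-based
    // constructor is selected explicitly.
    static void usageSketch(Collection<? extends Principal> somePrincipals) {
        PrincipalIterator principals = new PrincipalIteratorAdapter(somePrincipals);
        while (principals.hasNext()) {
            System.out.println(principals.nextPrincipal().getName());
        }
        System.out.println("EMPTY has more elements: " + EMPTY.hasNext());
    }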
} | oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/security/principal/PrincipalIteratorAdapter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.security.principal;
import org.apache.jackrabbit.api.security.principal.PrincipalIterator;
import org.apache.jackrabbit.commons.iterator.RangeIteratorAdapter;
import org.apache.jackrabbit.commons.iterator.RangeIteratorDecorator;
import javax.jcr.RangeIterator;
import java.security.Principal;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* PrincipalIteratorAdapter...
*
* TODO: move to jackrabbit-jcr-commons
*/
public class PrincipalIteratorAdapter extends RangeIteratorDecorator
implements PrincipalIterator {
/**
* Static instance of an empty {@link PrincipalIterator}.
*/
public static final PrincipalIteratorAdapter EMPTY = new PrincipalIteratorAdapter(RangeIteratorAdapter.EMPTY);
/**
* Creates an adapter for the given {@link javax.jcr.RangeIterator}.
*
* @param iterator iterator of {@link java.security.Principal}s
*/
public PrincipalIteratorAdapter(RangeIterator iterator) {
super(iterator);
}
/**
* Creates an adapter for the given {@link java.util.Iterator} of principals.
*
* @param iterator iterator of {@link java.security.Principal}s
*/
public PrincipalIteratorAdapter(Iterator<? extends Principal> iterator) {
super(new RangeIteratorAdapter(iterator));
}
/**
* Creates an iterator for the given collection of {@code Principal}s.
*
* @param collection collection of {@link Principal} objects.
*/
public PrincipalIteratorAdapter(Collection<? extends Principal> collection) {
super(new RangeIteratorAdapter(collection));
}
    //---------------------------------------------------< PrincipalIterator >---
    /**
     * Returns the next principal.
     *
     * @return next principal.
     * @throws java.util.NoSuchElementException if there is no next principal.
     */
@Override
public Principal nextPrincipal() throws NoSuchElementException {
return (Principal) next();
}
} | OAK-50 - Implement User Management (WIP)
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1325770 13f79535-47bb-0310-9956-ffa450edef68
| oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/security/principal/PrincipalIteratorAdapter.java | OAK-50 - Implement User Management (WIP) | <ide><path>ak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/security/principal/PrincipalIteratorAdapter.java
<ide> /**
<ide> * Static instance of an empty {@link PrincipalIterator}.
<ide> */
<del> public static final PrincipalIteratorAdapter EMPTY = new PrincipalIteratorAdapter(RangeIteratorAdapter.EMPTY);
<add> public static final PrincipalIteratorAdapter EMPTY = new PrincipalIteratorAdapter((Iterator<? extends Principal>) RangeIteratorAdapter.EMPTY);
<ide>
<ide> /**
<ide> * Creates an adapter for the given {@link javax.jcr.RangeIterator}. |
|
Java | apache-2.0 | 271c7cf01d8464648cf0edd031139149b21cfc60 | 0 | isharac/carbon-apimgt,harsha89/carbon-apimgt,jaadds/carbon-apimgt,malinthaprasan/carbon-apimgt,harsha89/carbon-apimgt,chamilaadhi/carbon-apimgt,tharindu1st/carbon-apimgt,tharindu1st/carbon-apimgt,prasa7/carbon-apimgt,jaadds/carbon-apimgt,prasa7/carbon-apimgt,chamilaadhi/carbon-apimgt,Rajith90/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,chamindias/carbon-apimgt,malinthaprasan/carbon-apimgt,chamindias/carbon-apimgt,uvindra/carbon-apimgt,praminda/carbon-apimgt,bhathiya/carbon-apimgt,chamilaadhi/carbon-apimgt,jaadds/carbon-apimgt,tharindu1st/carbon-apimgt,bhathiya/carbon-apimgt,tharikaGitHub/carbon-apimgt,malinthaprasan/carbon-apimgt,ruks/carbon-apimgt,uvindra/carbon-apimgt,ruks/carbon-apimgt,uvindra/carbon-apimgt,chamindias/carbon-apimgt,pubudu538/carbon-apimgt,nuwand/carbon-apimgt,pubudu538/carbon-apimgt,Rajith90/carbon-apimgt,fazlan-nazeem/carbon-apimgt,isharac/carbon-apimgt,ruks/carbon-apimgt,jaadds/carbon-apimgt,isharac/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,pubudu538/carbon-apimgt,tharikaGitHub/carbon-apimgt,harsha89/carbon-apimgt,Rajith90/carbon-apimgt,prasa7/carbon-apimgt,nuwand/carbon-apimgt,bhathiya/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,praminda/carbon-apimgt,fazlan-nazeem/carbon-apimgt,prasa7/carbon-apimgt,tharikaGitHub/carbon-apimgt,isharac/carbon-apimgt,nuwand/carbon-apimgt,chamindias/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamilaadhi/carbon-apimgt,uvindra/carbon-apimgt,wso2/carbon-apimgt,pubudu538/carbon-apimgt,wso2/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,tharikaGitHub/carbon-apimgt,ruks/carbon-apimgt,nuwand/carbon-apimgt,wso2/carbon-apimgt,bhathiya/carbon-apimgt,wso2/carbon-apimgt,malinthaprasan/carbon-apimgt,tharindu1st/carbon-apimgt,praminda/carbon-apimgt,Rajith90/carbon-apimgt,harsha89/carbon-apimgt,fazlan-nazeem/carbon-apimgt | /*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.apimgt.api.APIConsumer;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException;
import org.wso2.carbon.apimgt.api.LoginPostExecutor;
import org.wso2.carbon.apimgt.api.NewPostLoginExecutor;
import org.wso2.carbon.apimgt.api.WorkflowResponse;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIKey;
import org.wso2.carbon.apimgt.api.model.APIRating;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.api.model.AccessTokenRequest;
import org.wso2.carbon.apimgt.api.model.Application;
import org.wso2.carbon.apimgt.api.model.ApplicationConstants;
import org.wso2.carbon.apimgt.api.model.ApplicationKeysDTO;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.OAuthAppRequest;
import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.Subscriber;
import org.wso2.carbon.apimgt.api.model.SubscriptionResponse;
import org.wso2.carbon.apimgt.api.model.Tag;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.TierPermission;
import org.wso2.carbon.apimgt.impl.caching.CacheInvalidator;
import org.wso2.carbon.apimgt.impl.dto.ApplicationRegistrationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.ApplicationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.SubscriptionWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO;
import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO;
import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APINameComparator;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator;
import org.wso2.carbon.apimgt.impl.utils.ApplicationUtils;
import org.wso2.carbon.apimgt.impl.workflow.AbstractApplicationRegistrationWorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.GeneralWorkflowResponse;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowException;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.GenericArtifactManager;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact;
import org.wso2.carbon.governance.api.util.GovernanceUtils;
import org.wso2.carbon.registry.common.TermData;
import org.wso2.carbon.registry.core.ActionConstants;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.pagination.PaginationContext;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.user.api.AuthorizationManager;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.mgt.stub.UserAdminStub;
import org.wso2.carbon.user.mgt.stub.UserAdminUserAdminException;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.cache.Caching;
import javax.wsdl.Definition;
/**
* This class provides the core API store functionality. It is implemented in a very
* self-contained and 'pure' manner, without taking requirements like security into account,
* which are subject to frequent change. Due to this 'pure' nature and the significance of
* the class to the overall API management functionality, the visibility of the class has
* been reduced to package level. This means we can still use it for internal purposes and
 * possibly even extend it, but it's totally off limits to the users. Users wishing to
* programmatically access this functionality should use one of the extensions of this
* class which is visible to them. These extensions may add additional features like
* security to this class.
*/
class APIConsumerImpl extends AbstractAPIManager implements APIConsumer {
private static final Log log = LogFactory.getLog(APIConsumerImpl.class);
public static final char COLON_CHAR = ':';
public static final String EMPTY_STRING = "";
public static final String ENVIRONMENT_NAME = "environmentName";
public static final String ENVIRONMENT_TYPE = "environmentType";
public static final String API_NAME = "apiName";
public static final String API_VERSION = "apiVersion";
public static final String API_PROVIDER = "apiProvider";
/* Map to Store APIs against Tag */
private ConcurrentMap<String, Set<API>> taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
private boolean isTenantModeStoreView;
private String requestedTenant;
private boolean isTagCacheEnabled;
private Set<Tag> tagSet;
private long tagCacheValidityTime;
private volatile long lastUpdatedTime;
private volatile long lastUpdatedTimeForTagApi;
private final Object tagCacheMutex = new Object();
private final Object tagWithAPICacheMutex = new Object();
protected APIMRegistryService apimRegistryService;
protected String userNameWithoutChange;
public APIConsumerImpl() throws APIManagementException {
super();
readTagCacheConfigs();
}
public APIConsumerImpl(String username, APIMRegistryService apimRegistryService) throws APIManagementException {
super(username);
userNameWithoutChange = username;
readTagCacheConfigs();
this.apimRegistryService = apimRegistryService;
}
private void readTagCacheConfigs() {
APIManagerConfiguration config = getAPIManagerConfiguration();
String enableTagCache = config.getFirstProperty(APIConstants.STORE_TAG_CACHE_DURATION);
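// Illustrative only: a configured value such as "120000" enables the tag cache with a validity window of
// 120 seconds (the value is treated as milliseconds when cache freshness is checked in getAllTags/getAPIsWithTag).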
if (enableTagCache == null) {
isTagCacheEnabled = false;
tagCacheValidityTime = 0;
} else {
isTagCacheEnabled = true;
tagCacheValidityTime = Long.parseLong(enableTagCache);
}
}
@Override
public Subscriber getSubscriber(String subscriberId) throws APIManagementException {
Subscriber subscriber = null;
try {
subscriber = apiMgtDAO.getSubscriber(subscriberId);
} catch (APIManagementException e) {
handleException("Failed to get Subscriber", e);
}
return subscriber;
}
/**
* Returns the set of APIs with the given tag from the taggedAPIs Map
*
* @param tagName               The name of the tag
* @param requestedTenantDomain The tenant domain of the store being browsed (may be null for the current tenant)
* @return Set of {@link API} with the given tag
* @throws APIManagementException
*/
@Override
public Set<API> getAPIsWithTag(String tagName, String requestedTenantDomain) throws APIManagementException {
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTimeForTagApi;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if (taggedAPIs != null && taggedAPIs.containsKey(tagName)) {
return taggedAPIs.get(tagName);
}
}else{
synchronized (tagWithAPICacheMutex) {
lastUpdatedTimeForTagApi = System.currentTimeMillis();
taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
}
}
boolean isTenantMode = requestedTenantDomain != null && !"null".equalsIgnoreCase(requestedTenantDomain);
this.isTenantModeStoreView = isTenantMode;
if (requestedTenantDomain != null && !"null".equals(requestedTenantDomain)) {
this.requestedTenant = requestedTenantDomain;
}
Registry userRegistry;
boolean isTenantFlowStarted = false;
Set<API> apisWithTag = null;
try {
//start the tenant flow prior to loading registry
if (requestedTenant != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenant)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenantDomain);
}
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(requestedTenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
apisWithTag = getAPIsWithTag(userRegistry, tagName);
/* Add the APIs against the tag name */
if (!apisWithTag.isEmpty()) {
if (taggedAPIs.containsKey(tagName)) {
for (API api : apisWithTag) {
taggedAPIs.get(tagName).add(api);
}
} else {
taggedAPIs.putIfAbsent(tagName, apisWithTag);
}
}
} catch (RegistryException e) {
handleException("Failed to get api by the tag", e);
} catch (UserStoreException e) {
handleException("Failed to get api by the tag", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return apisWithTag;
}
protected void setUsernameToThreadLocalCarbonContext(String username) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(username);
}
protected UserRegistry getGovernanceUserRegistry(int tenantId) throws RegistryException {
return ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
}
protected int getTenantId(String requestedTenantDomain) throws UserStoreException {
return ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
}
/**
* Returns a paginated set of APIs with the given tag, served from the taggedAPIs Map.
*
* @param tag          The name of the tag
* @param start        The starting index of the result set
* @param end          The maximum number of APIs to return from the starting index
* @param tenantDomain The tenant domain of the store being browsed
* @return A {@link Map} containing the APIs (between the given indexes) and the total number of available
* APIs
* @throws APIManagementException
*/
@Override
public Map<String, Object> getPaginatedAPIsWithTag(String tag, int start, int end, String tenantDomain) throws APIManagementException {
List<API> apiList = new ArrayList<API>();
Set<API> resultSet = new TreeSet<API>(new APIVersionComparator());
Map<String, Object> results = new HashMap<String, Object>();
Set<API> taggedAPISet = this.getAPIsWithTag(tag,tenantDomain);
if (taggedAPISet != null) {
if (taggedAPISet.size() < end) {
end = taggedAPISet.size();
}
int totalLength;
apiList.addAll(taggedAPISet);
totalLength = apiList.size();
if (totalLength <= ((start + end) - 1)) {
end = totalLength;
} else {
end = start + end;
}
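// Worked example (illustrative): with start=10, end=5 and 30 tagged APIs, totalLength=30 > 14,
// so end becomes 15 and the loop below returns the APIs at indexes 10..14.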
for (int i = start; i < end; i++) {
resultSet.add(apiList.get(i));
}
results.put("apis", resultSet);
results.put("length", taggedAPISet.size());
} else {
results.put("apis", null);
results.put("length", 0);
}
return results;
}
/**
* Returns the set of APIs with the given tag, retrieved from registry
*
* @param registry - Current registry; tenant/SuperTenant
* @param tag - The tag name
* @return A {@link Set} of {@link API} objects.
* @throws APIManagementException
*/
private Set<API> getAPIsWithTag(Registry registry, String tag)
throws APIManagementException {
Set<API> apiSet = new TreeSet<API>(new APINameComparator());
try {
List<GovernanceArtifact> genericArtifacts =
GovernanceUtils.findGovernanceArtifacts(getSearchQuery(APIConstants.TAG_SEARCH_TYPE_PREFIX2 + tag), registry,
APIConstants.API_RXT_MEDIA_TYPE);
for (GovernanceArtifact genericArtifact : genericArtifacts) {
try {
String apiStatus = APIUtil.getLcStateFromArtifact(genericArtifact);
if (genericArtifact != null && (APIConstants.PUBLISHED.equals(apiStatus)
|| APIConstants.PROTOTYPED.equals(apiStatus))) {
API api = APIUtil.getAPI(genericArtifact);
if (api != null) {
apiSet.add(api);
}
}
} catch (RegistryException e) {
log.warn("User is not authorized to get an API with tag " + tag, e);
}
}
} catch (RegistryException e) {
handleException("Failed to get API for tag " + tag, e);
}
return apiSet;
}
/**
* The method to get APIs for the Store view
*
* @param tenantDomain The tenant domain of the store being browsed
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
public Set<API> getAllPublishedAPIs(String tenantDomain) throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
for (GenericArtifact artifact : genericArtifacts) {
// The API provider is included in the key below so the latest version of each API can be identified.
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
try {
checkAccessControlPermission(api.getId());
} catch (APIManagementException e) {
// This is a second level of filter to get apis based on access control and visibility.
// Hence log is set as debug and continued.
if(log.isDebugEnabled()) {
log.debug("User is not authorized to view the api " + api.getId().getApiName(), e);
}
continue;
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs for store. User : " + PrivilegedCarbonContext
.getThreadLocalCarbonContext().getUsername();
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}
return apiSortedSet;
}
/**
* The method to get paginated published APIs for the Store view
*
* @return Map<String, Object> containing the paginated set of APIs ("apis") and the total API count ("totalLength")
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String,Object> getAllPaginatedPublishedAPIs(String tenantDomain,int start,int end)
throws APIManagementException {
Boolean displayAPIsWithMultipleStatus = false;
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
}finally {
endTenantFlow();
}
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
} else{
return getAllPaginatedAPIs(tenantDomain, start, end);
}
Map<String, Object> result = new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength = 0;
try {
Registry userRegistry;
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting paginated published API.");
continue;
}
// The API provider is included in the key below so the latest version of each API can be identified.
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all Published APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
/**
* Regenerate consumer secret.
*
* @param clientId Consumer key of the application whose consumer secret should be regenerated.
* @return New consumer secret.
* @throws APIManagementException if an error occurs while regenerating the consumer secret.
*/
public String renewConsumerSecret(String clientId) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
return keyManager.getNewApplicationConsumerSecret(tokenRequest);
}
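// Illustrative usage of renewConsumerSecret, assuming 'apiConsumer' is an APIConsumer instance;
// the consumer key value is hypothetical:
//   String newSecret = apiConsumer.renewConsumerSecret("vYDoCBdoggfCKtfPYN87Rika");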
/**
* The method to get APIs in any of the given lifecycle (LC) states
*
* @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
@Override
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String[] apiStatus, boolean returnAPITags) throws APIManagementException {
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
boolean isMore = false;
String criteria = APIConstants.LCSTATE_SEARCH_TYPE_KEY;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
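// Worked example (illustrative): with start=0 and an APIs-per-page value of 10, pagination is bumped
// to 11 (with a warning logged) and maxPaginationLimit = 0 + 11 + 1 = 12.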
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
criteria = criteria + APIUtil.getORBasedSearchCriteria(apiStatus);
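// e.g., for apiStatus = {PUBLISHED, PROTOTYPED} the criteria is expected to resemble
// lcState=(PUBLISHED OR PROTOTYPED); the exact form depends on the constants and getORBasedSearchCriteria().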
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
if (apiStatus != null && apiStatus.length > 0) {
List<GovernanceArtifact> genericArtifacts = GovernanceUtils.findGovernanceArtifacts
(getSearchQuery(criteria), userRegistry, APIConstants.API_RXT_MEDIA_TYPE);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.size() == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
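// Worked example (illustrative): continuing the pagination example above (maxPaginationLimit = 12),
// if the registry reports totalLength == 12 we assume at least one more API exists, set isMore
// and report totalLength as 11.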
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength = 0;
for (GovernanceArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength) {
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* The method to get APIs with the given status for the Store view
*
* @return Map<String, Object> containing the paginated set of APIs ("apis") and the total API count ("totalLength")
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String apiStatus, boolean returnAPITags) throws APIManagementException {
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
}finally {
endTenantFlow();
}
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (APIConstants.PROTOTYPED.equals(apiStatus)) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
return getAllPaginatedAPIs(tenantDomain, start, end);
}
}
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
boolean isMore = false;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength=PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength=0;
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs by status.");
continue;
}
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength){
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* Re-generates the access token.
* @param oldAccessToken Token to be revoked
* @param clientId Consumer Key for the Application
* @param clientSecret Consumer Secret for the Application
* @param validityTime Desired Validity time for the token
* @param requestedScopes Scopes to be associated with the renewed token
* @param jsonInput Additional parameters if the Authorization server needs any.
* @return Renewed Access Token.
* @throws APIManagementException
*/
@Override
public AccessTokenInfo renewAccessToken(String oldAccessToken, String clientId, String clientSecret,
String validityTime, String[] requestedScopes, String jsonInput) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
tokenRequest.setClientSecret(clientSecret);
tokenRequest.setValidityPeriod(Long.parseLong(validityTime));
tokenRequest.setTokenToRevoke(oldAccessToken);
tokenRequest.setScope(requestedScopes);
try {
// Populating additional parameters.
tokenRequest = ApplicationUtils.populateTokenRequest(jsonInput, tokenRequest);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
JSONObject appLogObject = new JSONObject();
appLogObject.put("Re-Generated Keys for application with client Id", clientId);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyManager.getNewApplicationAccessToken(tokenRequest);
} catch (APIManagementException e) {
log.error("Error while re-generating AccessToken", e);
throw e;
}
}
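// Illustrative usage of renewAccessToken, assuming 'apiConsumer' is an APIConsumer instance;
// all argument values below are hypothetical:
//   AccessTokenInfo info = apiConsumer.renewAccessToken(oldToken, "consumerKey", "consumerSecret",
//           "3600", new String[] { "default" }, "{}");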
/**
* The method to get all PUBLISHED and DEPRECATED APIs for the Store view
*
* @return Map<String, Object> containing the paginated set of APIs ("apis") and the total API count ("totalLength")
* @throws APIManagementException
*/
@Deprecated
public Map<String,Object> getAllPaginatedAPIs(String tenantDomain,int start,int end) throws APIManagementException {
Map<String,Object> result=new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength=0;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
boolean noPublishedAPIs = false;
if (artifactManager != null) {
//Create the search attribute map for PUBLISHED APIs
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
noPublishedAPIs = true;
}
int publishedAPICount;
if (genericArtifacts != null) {
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs.");
continue;
}
// The API provider is included in the key below so the latest version of each API can be identified.
// String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
// key = api.getId().getProviderName() + ":" + api.getId().getApiName() + ":" + api.getId()
// .getVersion();
multiVersionedAPIs.add(api);
}
}
}
}
if (!displayMultipleVersions) {
publishedAPICount = latestPublishedAPIs.size();
} else {
publishedAPICount = multiVersionedAPIs.size();
}
if ((start + end) > publishedAPICount) {
if (publishedAPICount > 0) {
/*Starting to retrieve DEPRECATED APIs*/
start = 0;
/* publishedAPICount is always less than end*/
end = end - publishedAPICount;
} else {
start = start - totalLength;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
//Create the search attribute map for DEPRECATED APIs
Map<String, List<String>> listMapForDeprecatedAPIs = new HashMap<String, List<String>>();
listMapForDeprecatedAPIs.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.DEPRECATED);
}});
GenericArtifact[] genericArtifactsForDeprecatedAPIs = artifactManager.findGenericArtifacts(listMapForDeprecatedAPIs);
totalLength = totalLength + PaginationContext.getInstance().getLength();
if ((genericArtifactsForDeprecatedAPIs == null || genericArtifactsForDeprecatedAPIs.length == 0) && noPublishedAPIs) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
}
if (genericArtifactsForDeprecatedAPIs != null) {
for (GenericArtifact artifact : genericArtifactsForDeprecatedAPIs) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting deprecated APIs.");
continue;
}
// The API provider is included in the key below so the latest version of each API can be identified.
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
}
}
if (!displayMultipleVersions) {
for (API api : latestPublishedAPIs.values()) {
apiSortedSet.add(api);
}
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
@Override
public Set<API> getTopRatedAPIs(int limit) throws APIManagementException {
int returnLimit = 0;
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
try {
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage = "Artifact manager is null when retrieving top rated APIs.";
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
for (GenericArtifact genericArtifact : genericArtifacts) {
String status = APIUtil.getLcStateFromArtifact(genericArtifact);
if (APIConstants.PUBLISHED.equals(status)) {
String artifactPath = genericArtifact.getPath();
float rating = registry.getAverageRating(artifactPath);
if (rating > APIConstants.TOP_TATE_MARGIN && (returnLimit < limit)) {
returnLimit++;
API api = APIUtil.getAPI(genericArtifact, registry);
if (api != null) {
apiSortedSet.add(api);
}
}
}
}
} catch (RegistryException e) {
handleException("Failed to get top rated API", e);
}
return apiSortedSet;
}
/**
* Get the recently added APIs set
*
* @param limit Maximum number of recently added APIs to return
* @return Set<API>
* @throws APIManagementException
*/
@Override
public Set<API> getRecentlyAddedAPIs(int limit, String tenantDomain)
throws APIManagementException {
SortedSet<API> recentlyAddedAPIs = new TreeSet<API>(new APINameComparator());
SortedSet<API> recentlyAddedAPIsWithMultipleVersions = new TreeSet<API>(new APIVersionComparator());
Registry userRegistry;
APIManagerConfiguration config = getAPIManagerConfiguration();
boolean isRecentlyAddedAPICacheEnabled =
Boolean.parseBoolean(config.getFirstProperty(APIConstants.API_STORE_RECENTLY_ADDED_API_CACHE_ENABLE));
PrivilegedCarbonContext.startTenantFlow();
boolean isTenantFlowStarted;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
isTenantFlowStarted = true;
} else {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
isTenantFlowStarted = true;
}
try {
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
isTenantFlowStarted = true;
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
isTenantFlowStarted = true;
}
if (isRecentlyAddedAPICacheEnabled) {
boolean isStatusChanged = false;
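// The recently-added cache is keyed by '<username>:<tenantDomain>', e.g. 'alice:carbon.super' (illustrative).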
Set<API> recentlyAddedAPI = (Set<API>) Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).get(username + COLON_CHAR + tenantDomain);
if (recentlyAddedAPI != null) {
for (API api : recentlyAddedAPI) {
try {
if (!APIConstants.PUBLISHED.equalsIgnoreCase(userRegistry.get(APIUtil.getAPIPath(api.getId())).getProperty(APIConstants.API_STATUS))) {
isStatusChanged = true;
break;
}
} catch (Exception ex) {
log.error("Error while checking API status for APP " + api.getId().getApiName() + '-' +
api.getId().getVersion(), ex);
}
}
if (!isStatusChanged) {
return recentlyAddedAPI;
}
}
}
PaginationContext.init(0, limit, APIConstants.REGISTRY_ARTIFACT_SEARCH_DESC_ORDER,
APIConstants.CREATED_DATE, Integer.MAX_VALUE);
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
listMap.put(APIConstants.STORE_VIEW_ROLES, getUserRoleList());
String searchCriteria = APIConstants.LCSTATE_SEARCH_KEY + "= (" + APIConstants.PUBLISHED + ")";
//Find UUID
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGovernanceArtifacts(getSearchQuery(searchCriteria));
SortedSet<API> allAPIs = new TreeSet<API>(new APINameComparator());
for (GenericArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//just log and continue since we want to go through the other APIs as well.
log.error("Error loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME), e);
}
if (api != null) {
allAPIs.add(api);
}
}
if (!APIUtil.isAllowDisplayMultipleVersions()) {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
Comparator<API> versionComparator = new APIVersionComparator();
String key;
for (API api : allAPIs) {
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same
// name, make sure this one has a higher version
// number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
}
recentlyAddedAPIs.addAll(latestPublishedAPIs.values());
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIs;
} else {
recentlyAddedAPIsWithMultipleVersions.addAll(allAPIs);
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIsWithMultipleVersions;
}
} else {
String errorMessage = "Artifact manager is null when retrieving recently added APIs for tenant domain "
+ tenantDomain;
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return recentlyAddedAPIs;
}
@Override
public Set<Tag> getAllTags(String requestedTenantDomain) throws APIManagementException {
this.isTenantModeStoreView = (requestedTenantDomain != null);
if(requestedTenantDomain != null){
this.requestedTenant = requestedTenantDomain;
}
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTime;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if(tagSet != null){
return tagSet;
}
}
TreeSet<Tag> tempTagSet = new TreeSet<Tag>(new Comparator<Tag>() {
@Override
public int compare(Tag o1, Tag o2) {
return o1.getName().compareTo(o2.getName());
}
});
Registry userRegistry = null;
boolean isTenantFlowStarted = false;
String tagsQueryPath = null;
try {
tagsQueryPath = RegistryConstants.QUERIES_COLLECTION_PATH + "/tag-summary";
Map<String, String> params = new HashMap<String, String>();
params.put(RegistryConstants.RESULT_TYPE_PROPERTY_NAME, RegistryConstants.TAG_SUMMARY_RESULT_TYPE);
// As a tenant, I'm either browsing my own Store or the Store of another tenant.
if ((this.isTenantModeStoreView && this.tenantDomain==null) || (this.isTenantModeStoreView && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(this.requestedTenant);
userRegistry = ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
} else {
userRegistry = registry;
}
Map<String, Tag> tagsData = new HashMap<String, Tag>();
try {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
if (requestedTenant != null ) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenant);
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
}
Map <String, List<String>> criteriaPublished = new HashMap<String, List<String>>();
criteriaPublished.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
//rxt api media type
List<TermData> termsPublished = GovernanceUtils
.getTermDataList(criteriaPublished, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPublished != null){
for(TermData data : termsPublished){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
Map<String, List<String>> criteriaPrototyped = new HashMap<String, List<String>>();
criteriaPrototyped.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PROTOTYPED);
}});
//rxt api media type
List<TermData> termsPrototyped = GovernanceUtils
.getTermDataList(criteriaPrototyped, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPrototyped != null){
for(TermData data : termsPrototyped){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
synchronized (tagCacheMutex) {
lastUpdatedTime = System.currentTimeMillis();
this.tagSet = tempTagSet;
}
} catch (RegistryException e) {
try {
// Until a tenant has logged in to the store or publisher at least once,
// a registry exception is thrown when the tenant store is accessed in anonymous mode.
// This fix checks whether the tags query resource is available in the registry and,
// if not, logs a warning.
if (userRegistry != null && !userRegistry.resourceExists(tagsQueryPath)) {
log.warn("Failed to retrieve tags query resource at " + tagsQueryPath);
return tagSet == null ? Collections.EMPTY_SET : tagSet;
}
} catch (RegistryException e1) {
// Even though this exception could be ignored, it is logged as a warning. The reason is that
// this error happens while adding some additional logging in an error scenario and it does not
// affect the execution path.
log.warn("Unable to execute the resource exist method for tags query resource path : " + tagsQueryPath,
e1);
}
handleException("Failed to get all the tags", e);
} catch (UserStoreException e) {
handleException("Failed to get all the tags", e);
}
return tagSet;
}
@Override
public Set<Tag> getTagsWithAttributes(String tenantDomain) throws APIManagementException {
// Fetch all the tags first.
Set<Tag> tags = getAllTags(tenantDomain);
// For each and every tag get additional attributes from the registry.
String descriptionPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/description.txt";
String thumbnailPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/thumbnail.png";
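// e.g., for a tag named 'finance' these patterns resolve to <TAGS_INFO_ROOT_LOCATION>/finance/description.txt
// and <TAGS_INFO_ROOT_LOCATION>/finance/thumbnail.png (illustrative).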
// If the tenantDomain is not specified, the super tenant domain is used
if (StringUtils.isBlank(tenantDomain)) {
try {
tenantDomain = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getSuperTenantDomain();
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Cannot get super tenant domain name", e);
}
}
//get the registry instance related to the tenant domain
UserRegistry govRegistry = null;
try {
int tenantId = getTenantId(tenantDomain);
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
govRegistry = registryService.getGovernanceSystemRegistry(tenantId);
} catch (UserStoreException e) {
handleException("Cannot get tenant id for tenant domain name:" + tenantDomain, e);
} catch (RegistryException e) {
handleException("Cannot get registry for tenant domain name:" + tenantDomain, e);
}
if (govRegistry != null) {
for (Tag tag : tags) {
// Get the description.
Resource descriptionResource = null;
String descriptionPath = String.format(descriptionPathPattern, tag.getName());
try {
if (govRegistry.resourceExists(descriptionPath)) {
descriptionResource = govRegistry.get(descriptionPath);
}
} catch (RegistryException e) {
//warn and proceed to the next tag
log.warn(String.format("Error while querying the existence of the description for the tag '%s'",
tag.getName()), e);
}
// The resource is assumed to be a byte array since it's the content
// of a text file.
if (descriptionResource != null) {
try {
String description = new String((byte[]) descriptionResource.getContent(),
Charset.defaultCharset());
tag.setDescription(description);
} catch (ClassCastException e) {
// Logged as a warning so that the rest of the resources/tags can still be loaded
log.warn(String.format("Cannot cast content of %s to byte[]", descriptionPath), e);
} catch (RegistryException e) {
// Logged as a warning so that the rest of the resources/tags can still be loaded
log.warn(String.format("Cannot read content of %s", descriptionPath), e);
}
}
// Checks whether the thumbnail exists.
String thumbnailPath = String.format(thumbnailPathPattern, tag.getName());
try {
boolean isThumbnailExists = govRegistry.resourceExists(thumbnailPath);
tag.setThumbnailExists(isThumbnailExists);
if (isThumbnailExists) {
tag.setThumbnailUrl(APIUtil.getRegistryResourcePathForUI(
APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL, tenantDomain, thumbnailPath));
}
} catch (RegistryException e) {
//warn and then proceed to load rest of tags
log.warn(String.format("Error while querying the existence of %s", thumbnailPath), e);
}
}
}
return tags;
}
@Override
public void rateAPI(APIIdentifier apiId, APIRating rating,
String user) throws APIManagementException {
apiMgtDAO.addRating(apiId, rating.getRating(), user);
}
@Override
public void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException {
apiMgtDAO.removeAPIRating(apiId, user);
}
@Override
public int getUserRating(APIIdentifier apiId, String user) throws APIManagementException {
return apiMgtDAO.getUserRating(apiId, user);
}
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, int limit)
throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
if (associations.length < limit || limit == -1) {
limit = associations.length;
}
for (int i = 0; i < limit; i++) {
Association association = associations[i];
String apiPath = association.getDestinationPath();
Resource resource = registry.get(apiPath);
String apiArtifactId = resource.getUUID();
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
} else {
throw new GovernanceException("artifact id is null of " + apiPath);
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
}
return null;
}
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, String loggedUsername, int limit, String apiOwner,
String apiBizOwner) throws APIManagementException {
try {
Boolean allowMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean showAllAPIs = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
int tenantId = getTenantId(providerDomain);
final Registry registry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceSystemRegistry(tenantId);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving all published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
int publishedAPICount = 0;
Map<String, API> apiCollection = new HashMap<String, API>();
if(apiBizOwner != null && !apiBizOwner.isEmpty()){
try {
final String bizOwner = apiBizOwner;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_BUSS_OWNER, new ArrayList<String>() {{
add(bizOwner);
}});
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
if(genericArtifacts != null && genericArtifacts.length > 0){
for(GenericArtifact artifact : genericArtifacts){
if (publishedAPICount >= limit) {
break;
}
if(isCandidateAPI(artifact.getPath(), loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
} catch (GovernanceException e) {
log.error("Error while finding APIs by business owner " + apiBizOwner, e);
return null;
}
}
else{
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
for (Association association : associations) {
if (publishedAPICount >= limit) {
break;
}
String apiPath = association.getDestinationPath();
if(isCandidateAPI(apiPath, loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
return new HashSet<API>(apiCollection.values());
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
}
}
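/**
 * Checks whether the API at the given registry path should be included in the result set for the given
 * user, taking registry authorization, lifecycle state, visibility, API owner filtering and the
 * multiple-versions setting into account. Qualifying APIs are added to the supplied apiCollection map.
 *
 * @return true if the API was added to apiCollection, false otherwise
 */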
private boolean isCandidateAPI(String apiPath, String loggedUsername, GenericArtifactManager artifactManager,
int tenantId, boolean showAllAPIs, boolean allowMultipleVersions,
String apiOwner, String providerId, Registry registry, Map<String, API> apiCollection)
throws UserStoreException, RegistryException, APIManagementException {
AuthorizationManager manager = ServiceReferenceHolder.getInstance().getRealmService().
getTenantUserRealm(tenantId).getAuthorizationManager();
Comparator<API> versionComparator = new APIVersionComparator();
Resource resource;
String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
apiPath);
boolean checkAuthorized;
String userNameWithoutDomain = loggedUsername;
if (!loggedUsername.isEmpty() && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(super.tenantDomain)) {
String[] nameParts = loggedUsername.split("@");
userNameWithoutDomain = nameParts[0];
}
int loggedInUserTenantDomain = -1;
if(!StringUtils.isEmpty(loggedUsername)) {
loggedInUserTenantDomain = APIUtil.getTenantId(loggedUsername);
}
if (loggedUsername.isEmpty()) {
// Anonymous user is viewing.
checkAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET);
} else if (tenantId != loggedInUserTenantDomain) {
//Cross tenant scenario
providerId = APIUtil.replaceEmailDomainBack(providerId);
String[] nameParts = providerId.split("@");
String provideNameWithoutDomain = nameParts[0];
checkAuthorized = manager.isUserAuthorized(provideNameWithoutDomain, path, ActionConstants.GET);
} else {
// Some user is logged in also user and api provider tenant domain are same.
checkAuthorized = manager.isUserAuthorized(userNameWithoutDomain, path, ActionConstants.GET);
}
String apiArtifactId = null;
if (checkAuthorized) {
resource = registry.get(apiPath);
apiArtifactId = resource.getUUID();
}
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!showAllAPIs) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
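// Restricted-visibility APIs are only returned to users in the same tenant domain as the provider.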
String apiVisibility = api.getVisibility();
if(!StringUtils.isEmpty(apiVisibility) && !APIConstants.API_GLOBAL_VISIBILITY.equalsIgnoreCase(apiVisibility)) {
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
String loginUserDomain = MultitenantUtils.getTenantDomain(loggedUsername);
if(!StringUtils.isEmpty(providerDomain) && !StringUtils.isEmpty(loginUserDomain)
&& !providerDomain.equals(loginUserDomain)){
return false;
}
}
// apiOwner is the value coming from the front end, compared against the owner stored on the API instance
if (apiOwner != null && !apiOwner.isEmpty()) {
if (APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
api.getApiOwner() != null && !api.getApiOwner().isEmpty() &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject remote APIs when local admin user's API selected
} else if (!APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject local admin's APIs when remote API selected
}
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!allowMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = apiCollection.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
apiCollection.put(key, api);
return true;
}
} else {
// We haven't seen this API before
apiCollection.put(key, api);
return true;
}
} else { //If allow showing multiple versions of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName() + COLON_CHAR + api.getId()
.getVersion();
//we're not really interested in the key, so generate one for the sake of adding this element to
//the map.
key = key + '_' + apiCollection.size();
apiCollection.put(key, api);
return true;
}
}
}
return false;
}
@Override
public Map<String,Object> searchPaginatedAPIs(String searchTerm, String searchType, String requestedTenantDomain,int start,int end, boolean isLazyLoad)
throws APIManagementException {
Map<String,Object> result = new HashMap<String,Object>();
boolean isTenantFlowStarted = false;
try {
boolean isTenantMode=(requestedTenantDomain != null);
if (isTenantMode && !org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
} else {
requestedTenantDomain = org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
}
Registry userRegistry;
int tenantIDLocal = 0;
String userNameLocal = this.username;
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
tenantIDLocal = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
userRegistry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantIDLocal);
userNameLocal = CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME;
} else {
userRegistry = this.registry;
tenantIDLocal = tenantId;
}
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userNameLocal);
if (APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchType)) {
Map<Documentation, API> apiDocMap =
APIUtil.searchAPIsByDoc(userRegistry, tenantIDLocal, userNameLocal, searchTerm,
APIConstants.STORE_CLIENT);
result.put("apis", apiDocMap);
/*Pagination for Document search results is not supported yet, hence length is sent as end-start*/
if (apiDocMap.isEmpty()) {
result.put("length", 0);
} else {
result.put("length", end-start);
}
}
else if ("subcontext".equalsIgnoreCase(searchType)) {
result = APIUtil.searchAPIsByURLPattern(userRegistry, searchTerm, start,end); ;
}else {
result=searchPaginatedAPIs(userRegistry, searchTerm, searchType,start,end,isLazyLoad);
}
} catch (Exception e) {
handleException("Failed to Search APIs", e);
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return result;
}
/**
 * Paginated API search based on Solr indexing.
 *
 * @param registry registry to search
 * @param searchTerm term to search for
 * @param searchType type of search (name, provider, version, context, description or tag)
 * @param start start index of the page
 * @param end end index of the page
 * @param limitAttributes whether to return APIs with a limited set of attributes
 * @return a map containing the matching APIs ("apis"), the result length ("length") and an "isMore" flag
 * @throws APIManagementException if the search fails
 */
public Map<String,Object> searchPaginatedAPIs(Registry registry, String searchTerm, String searchType,int start,int end, boolean limitAttributes) throws APIManagementException {
SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator());
List<API> apiList = new ArrayList<API>();
searchTerm = searchTerm.trim();
Map<String,Object> result=new HashMap<String, Object>();
int totalLength=0;
boolean isMore = false;
String criteria=APIConstants.API_OVERVIEW_NAME;
try {
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
// Because the store jaggery pagination logic shows 10 results per page, we need to set the
// pagination limit to at least 11, or the pagination done at this level will conflict with the
// store pagination, leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
if (artifactManager != null) {
if (APIConstants.API_PROVIDER.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_PROVIDER;
searchTerm = searchTerm.replaceAll("@", "-AT-");
} else if (APIConstants.API_VERSION_LABEL.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_VERSION;
} else if (APIConstants.API_CONTEXT.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_CONTEXT;
} else if (APIConstants.API_DESCRIPTION.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_DESCRIPTION;
} else if (APIConstants.API_TAG.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_TAG;
}
//Create the search attribute map for PUBLISHED APIs
final String searchValue = searchTerm;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(criteria, new ArrayList<String>() {{
add(searchValue);
}});
boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
//Take only PUBLISHED APIs from the search when there is no need to return APIs with multiple
//statuses, because pagination breaks when an additional filter on the API status is applied afterwards
if (!displayAPIsWithMultipleStatus) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
}
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
boolean isFound = true;
if (genericArtifacts == null || genericArtifacts.length == 0) {
if (APIConstants.API_OVERVIEW_PROVIDER.equals(criteria)) {
genericArtifacts = searchAPIsByOwner(artifactManager, searchValue);
if (genericArtifacts == null || genericArtifacts.length == 0) {
isFound = false;
}
}
else {
isFound = false;
}
}
if (!isFound) {
result.put("apis", apiSet);
result.put("length", 0);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist, cannot determine total API count without incurring perf hit
--totalLength; // Remove the additional 1 added earlier when setting max pagination limit
}
int tempLength =0;
for (GenericArtifact artifact : genericArtifacts) {
String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
if (APIUtil.isAllowDisplayAPIsWithMultipleStatus()) {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)
|| APIConstants.DEPRECATED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
} else {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
}
// Ensure the number of APIs returned matches the length; there could be an additional API
// returned due to incrementing the pagination limit when reading from the registry
tempLength++;
if (tempLength >= totalLength){
break;
}
}
apiSet.addAll(apiList);
}
} catch (RegistryException e) {
handleException("Failed to search APIs with type", e);
}
result.put("apis",apiSet);
result.put("length",totalLength);
result.put("isMore", isMore);
return result;
}
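/**
 * Searches APIs by the overview owner field; used as a fallback when a provider-based search
 * returns no results.
 */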
private GenericArtifact[] searchAPIsByOwner(GenericArtifactManager artifactManager, final String searchValue) throws GovernanceException {
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_OWNER, new ArrayList<String>() {
{
add(searchValue);
}
});
return artifactManager.findGenericArtifacts(listMap);
}
/**
 * Deletes the application registration and application key mapping entries for the given application.
 *
 * @param applicationName application name
 * @param tokenType token type
 * @param groupId group ID
 * @param userName user name
 * @throws APIManagementException
 */
@Override
public void cleanUpApplicationRegistration(String applicationName ,String tokenType ,String groupId ,String
userName) throws APIManagementException{
Application application = apiMgtDAO.getApplicationByName(applicationName, userName, groupId);
String applicationId = String.valueOf(application.getId());
apiMgtDAO.deleteApplicationRegistration(applicationId , tokenType);
apiMgtDAO.deleteApplicationKeyMappingByApplicationIdAndType(applicationId, tokenType);
String consumerKey = apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(applicationId,tokenType);
}
/**
 * Maps an existing OAuth client to an APIM application.
 *
 * @param jsonString string containing the OAuth app details
 * @param userName user name of the logged-in user
 * @param clientId consumer key of the OAuth application
 * @param applicationName APIM application name
 * @param keyType key type of the mapping
 * @param tokenType the Application Token Type; this can be either default or jwt
 * @return a map of key details for the mapped OAuth client
 * @throws APIManagementException if the consumer key is already mapped to another application or the mapping fails
 */
@Override
public Map<String, Object> mapExistingOAuthClient(String jsonString, String userName, String clientId,
String applicationName, String keyType, String tokenType)
throws APIManagementException {
String callBackURL = null;
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, clientId, callBackURL,
"default",
jsonString, tokenType);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
// Checking if clientId is mapped with another application.
if (apiMgtDAO.isMappingExistsforConsumerKey(clientId)) {
String message = "Consumer Key " + clientId + " is used for another Application.";
log.error(message);
throw new APIManagementException(message);
}
log.debug("Client ID not mapped previously with another application.");
//createApplication on oAuthorization server.
OAuthApplicationInfo oAuthApplication = keyManager.mapOAuthApplication(oauthAppRequest);
//Do application mapping with consumerKey.
apiMgtDAO.createApplicationKeyTypeMappingForManualClients(keyType, applicationName, userName, clientId);
AccessTokenInfo tokenInfo;
if (oAuthApplication.getJsonString().contains(APIConstants.GRANT_TYPE_CLIENT_CREDENTIALS)) {
AccessTokenRequest tokenRequest = ApplicationUtils.createAccessTokenRequest(oAuthApplication, null);
tokenInfo = keyManager.getNewApplicationAccessToken(tokenRequest);
} else {
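// The client_credentials grant is not available for this application, so an application access token
// cannot be generated; return an empty placeholder token with an "N/A" scope instead.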
tokenInfo = new AccessTokenInfo();
tokenInfo.setAccessToken("");
tokenInfo.setValidityPeriod(0L);
String[] noScopes = new String[] {"N/A"};
tokenInfo.setScope(noScopes);
oAuthApplication.addParameter("tokenScope", Arrays.toString(noScopes));
}
Map<String, Object> keyDetails = new HashMap<String, Object>();
if (tokenInfo != null) {
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oAuthApplication.getClientId());
keyDetails.put("consumerSecret", oAuthApplication.getParameter("client_secret"));
keyDetails.put("appDetails", oAuthApplication.getJsonString());
return keyDetails;
}
/** Returns the SubscribedAPI object related to the given subscription ID.
 *
 * @param subscriptionId subscription ID
 * @return the matching SubscribedAPI
 * @throws APIManagementException
 */
@Override
public SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionById(subscriptionId);
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber) throws APIManagementException {
return getSubscribedAPIs(subscriber, null);
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException {
Set<SubscribedAPI> originalSubscribedAPIs;
Set<SubscribedAPI> subscribedAPIs = new HashSet<SubscribedAPI>();
try {
originalSubscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, groupingId);
if (originalSubscribedAPIs != null && !originalSubscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : originalSubscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi.getTier().getName());
subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName(), e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, applicationName, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName,
int startSubIndex, int endSubIndex, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getPaginatedSubscribedAPIs(subscriber, applicationName, startSubIndex,
endSubIndex, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
// subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
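/**
 * Returns the number of subscriptions for the given subscriber and application, optionally filtered
 * by group ID.
 */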
public Integer getSubscriptionCount(Subscriber subscriber, String applicationName, String groupingId)
throws APIManagementException {
return apiMgtDAO.getSubscriptionCount(subscriber, applicationName, groupingId);
}
@Override
public Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException {
try {
return apiMgtDAO.getAPIByConsumerKey(accessToken);
} catch (APIManagementException e) {
handleException("Error while obtaining API from API key", e);
}
return null;
}
@Override
public boolean isSubscribed(APIIdentifier apiIdentifier, String userId)
throws APIManagementException {
boolean isSubscribed;
try {
isSubscribed = apiMgtDAO.isSubscribed(apiIdentifier, userId);
} catch (APIManagementException e) {
String msg = "Failed to check if user(" + userId + ") has subscribed to " + apiIdentifier;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return isSubscribed;
}
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
WorkflowResponse workflowResponse = null;
int subscriptionId;
String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userId);
if (APIConstants.PUBLISHED.equals(api.getStatus())) {
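// New subscriptions are created in the ON_HOLD state; the subscription creation workflow executed
// below moves them forward.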
subscriptionId = apiMgtDAO.addSubscription(identifier, api.getContext(), applicationId,
APIConstants.SubscriptionStatus.ON_HOLD, tenantAwareUsername);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
WorkflowExecutor addSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
SubscriptionWorkflowDTO workflowDTO = new SubscriptionWorkflowDTO();
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(addSubscriptionWFExecutor.generateUUID());
workflowDTO.setWorkflowReference(String.valueOf(subscriptionId));
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
workflowDTO.setCallbackUrl(addSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setTierName(identifier.getTier());
workflowDTO.setApplicationName(apiMgtDAO.getApplicationNameFromId(applicationId));
workflowDTO.setApplicationId(applicationId);
workflowDTO.setSubscriber(userId);
workflowResponse = addSubscriptionWFExecutor.execute(workflowDTO);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the subscription entry.
apiMgtDAO.removeSubscriptionById(subscriptionId);
log.error("Could not execute Workflow", e);
throw new APIManagementException("Could not execute Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
//to handle on-the-fly subscription rejection (and removal of subscription entry from the database)
//the response should have {"Status":"REJECTED"} in the json payload for this to work.
boolean subscriptionRejected = false;
String subscriptionStatus = null;
String subscriptionUUID = "";
if (workflowResponse != null && workflowResponse.getJSONPayload() != null
&& !workflowResponse.getJSONPayload().isEmpty()) {
try {
JSONObject wfResponseJson = (JSONObject) new JSONParser().parse(workflowResponse.getJSONPayload());
if (APIConstants.SubscriptionStatus.REJECTED.equals(wfResponseJson.get("Status"))) {
subscriptionRejected = true;
subscriptionStatus = APIConstants.SubscriptionStatus.REJECTED;
}
} catch (ParseException e) {
log.error('\'' + workflowResponse.getJSONPayload() + "' is not a valid JSON.", e);
}
}
if (!subscriptionRejected) {
SubscribedAPI addedSubscription = getSubscriptionById(subscriptionId);
subscriptionStatus = addedSubscription.getSubStatus();
subscriptionUUID = addedSubscription.getUUID();
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
subsLogObject.put(APIConstants.AuditLogConstants.TIER, identifier.getTier());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
workflowResponse = new GeneralWorkflowResponse();
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion()
+ ", Subscription Status: " + subscriptionStatus + " subscribe by " + userId
+ " for app " + applicationName;
log.debug(logMessage);
}
return new SubscriptionResponse(subscriptionStatus, subscriptionUUID, workflowResponse);
} else {
throw new APIMgtResourceNotFoundException("Subscriptions not allowed on APIs in the state: " +
api.getStatus());
}
}
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId,
String groupId) throws APIManagementException {
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
return addSubscription(identifier, userId, applicationId);
}
/**
* Check whether the application is accessible to the specified user
* @param userId username
* @param applicationId application ID
* @param groupId GroupId list of the application
* @return true if the application is accessible by the specified user
*/
private boolean validateApplication(String userId, int applicationId, String groupId) {
try {
return apiMgtDAO.isAppAllowed(applicationId, userId, groupId);
} catch (APIManagementException e) {
log.error("Error occurred while getting user group id for user: " + userId, e);
}
return false;
}
@Override
public String getSubscriptionStatusById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionStatusById(subscriptionId);
}
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String providerTenantDomain = MultitenantUtils.getTenantDomain(APIUtil.
replaceEmailDomainBack(identifier.getProviderName()));
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
if (providerTenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME
.equals(providerTenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(providerTenantDomain, true);
isTenantFlowStarted = true;
}
API api = getAPI(identifier);
SubscriptionWorkflowDTO workflowDTO;
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor removeSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
String workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(identifier, applicationId);
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new SubscriptionWorkflowDTO();
} else {
workflowDTO = (SubscriptionWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
// set the tier name on the workflowDTO only when workflows are enabled
SubscribedAPI subscription = apiMgtDAO
.getSubscriptionById(Integer.parseInt(workflowDTO.getWorkflowReference()));
workflowDTO.setTierName(subscription.getTier().getName());
}
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApplicationName(applicationName);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(workflowExtRef);
workflowDTO.setSubscriber(userId);
workflowDTO.setCallbackUrl(removeSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApplicationId(applicationId);
String status = apiMgtDAO.getSubscriptionStatus(identifier, applicationId);
if (APIConstants.SubscriptionStatus.ON_HOLD.equals(status)) {
try {
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the deletion process
log.warn("Failed to clean pending subscription approval task");
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setExternalWorkflowReference(removeSubscriptionWFExecutor.generateUUID());
removeSubscriptionWFExecutor.execute(workflowDTO);
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION +
" for apiID " + identifier.getApiName();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " +
identifier.getVersion() + " subscription removed from app " + applicationName + " by " + userId;
log.debug(logMessage);
}
}
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId, String groupId) throws
APIManagementException {
//check whether the application is accessible to the logged-in user
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
removeSubscription(identifier, userId, applicationId);
}
/**
* Removes a subscription specified by SubscribedAPI object
*
* @param subscription SubscribedAPI object
* @throws APIManagementException
*/
@Override
public void removeSubscription(SubscribedAPI subscription) throws APIManagementException {
String uuid = subscription.getUUID();
SubscribedAPI subscribedAPI = apiMgtDAO.getSubscriptionByUUID(uuid);
if (subscribedAPI != null) {
Application application = subscribedAPI.getApplication();
APIIdentifier identifier = subscribedAPI.getApiId();
String userId = application.getSubscriber().getName();
removeSubscription(identifier, userId, application.getId());
if (log.isDebugEnabled()) {
String appName = application.getName();
String logMessage =
"API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion() +
" subscription (uuid : " + uuid + ") removed from app " + appName;
log.debug(logMessage);
}
} else {
throw new APIManagementException("Subscription for UUID:" + uuid +" does not exist.");
}
}
/**
 * Invalidates the gateway cache entries related to the given application.
 *
 * @param applicationId Application ID whose related cache keys are to be cleared
 * @throws APIManagementException
 */
private void invalidateCachedKeys(int applicationId) throws APIManagementException {
CacheInvalidator.getInstance().invalidateCacheForApp(applicationId);
}
@Override
public void removeSubscriber(APIIdentifier identifier, String userId)
throws APIManagementException {
throw new UnsupportedOperationException("Unsubscribe operation is not yet implemented");
}
@Override
public void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
apiMgtDAO.updateSubscriptions(identifier, api.getContext(), applicationId, userId);
}
@Override
public void addComment(APIIdentifier identifier, String commentText, String user) throws APIManagementException {
apiMgtDAO.addComment(identifier, commentText, user);
}
@Override
public org.wso2.carbon.apimgt.api.model.Comment[] getComments(APIIdentifier identifier)
throws APIManagementException {
return apiMgtDAO.getComments(identifier);
}
/**
 * Adds a new Application from the store.
 * @param application - {@link org.wso2.carbon.apimgt.api.model.Application}
 * @param userId - {@link String} user ID of the subscriber adding the application
 * @return the ID of the newly created application
 */
@Override
public int addApplication(Application application, String userId)
throws APIManagementException {
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
if (APIUtil.isApplicationExist(userId, application.getName(), application.getGroupId())) {
handleResourceAlreadyExistsException(
"A duplicate application already exists by the name - " + application.getName());
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(application.getCallbackUrl())) {
application.setCallbackUrl(null);
}
int applicationId = apiMgtDAO.addApplication(application, userId);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
try {
WorkflowExecutor appCreationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
ApplicationWorkflowDTO appWFDto = new ApplicationWorkflowDTO();
appWFDto.setApplication(application);
appWFDto.setExternalWorkflowReference(appCreationWFExecutor.generateUUID());
appWFDto.setWorkflowReference(String.valueOf(applicationId));
appWFDto.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
appWFDto.setCallbackUrl(appCreationWFExecutor.getCallbackURL());
appWFDto.setStatus(WorkflowStatus.CREATED);
appWFDto.setTenantDomain(tenantDomain);
appWFDto.setTenantId(tenantId);
appWFDto.setUserName(userId);
appWFDto.setCreatedTime(System.currentTimeMillis());
appCreationWFExecutor.execute(appWFDto);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the application entry.
application.setId(applicationId);
apiMgtDAO.deleteApplication(application);
log.error("Unable to execute Application Creation Workflow", e);
handleException("Unable to execute Application Creation Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
log.debug("Application Name: " + application.getName() +" added successfully.");
}
return applicationId;
}
/** Updates an Application identified by its id
*
* @param application Application object to be updated
* @throws APIManagementException
*/
@Override
public void updateApplication(Application application) throws APIManagementException {
Application existingApp;
String uuid = application.getUUID();
if (!StringUtils.isEmpty(uuid)) {
existingApp = apiMgtDAO.getApplicationByUUID(uuid);
if (existingApp != null) {
Set<APIKey> keys = getApplicationKeys(existingApp.getId());
for (APIKey key : keys) {
existingApp.addKey(key);
}
}
application.setId(existingApp.getId());
} else {
existingApp = apiMgtDAO.getApplicationById(application.getId());
}
if (existingApp != null && APIConstants.ApplicationStatus.APPLICATION_CREATED.equals(existingApp.getStatus())) {
throw new APIManagementException("Cannot update the application while it is INACTIVE");
}
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().
equalsIgnoreCase(existingApp.getSubscriber().getName());
} else {
isUserAppOwner = application.getSubscriber().getName().equals(existingApp.getSubscriber().getName());
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to update application owned by: " + existingApp.getSubscriber().getName());
}
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
apiMgtDAO.updateApplication(application);
if (log.isDebugEnabled()) {
log.debug("Successfully updated the Application: " + application.getId() +" in the database.");
}
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.STATUS, existingApp != null ? existingApp.getStatus() : "");
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
try {
invalidateCachedKeys(application.getId());
} catch (APIManagementException ignore) {
//Log and ignore since we do not want to throw exceptions to the front end due to cache invalidation failure.
log.warn("Failed to invalidate Gateway Cache " + ignore.getMessage(), ignore);
}
}
/**
 * Removes an Application from the API Store.
 *
 * @param application - the Application object to remove
 * @param username username of the user requesting the removal
 * @throws APIManagementException
 */
@Override
public void removeApplication(Application application, String username) throws APIManagementException {
String uuid = application.getUUID();
if (application.getId() == 0 && !StringUtils.isEmpty(uuid)) {
application = apiMgtDAO.getApplicationByUUID(uuid);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
}
boolean isTenantFlowStarted = false;
int applicationId = application.getId();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(username);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(username);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + username + ", " +
"attempted to remove application owned by: " + application.getSubscriber().getName());
}
try {
String workflowExtRef;
ApplicationWorkflowDTO workflowDTO;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
isTenantFlowStarted = true;
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
WorkflowExecutor createApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor createProductionRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
WorkflowExecutor createSandboxRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
WorkflowExecutor removeApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceByApplicationID(application.getId());
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new ApplicationWorkflowDTO();
} else {
workflowDTO = (ApplicationWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
}
workflowDTO.setApplication(application);
workflowDTO.setCallbackUrl(removeApplicationWFExecutor.getCallbackURL());
workflowDTO.setUserName(this.username);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
// Remove from cache first since we won't be able to find active access tokens
// once the application is removed.
invalidateCachedKeys(application.getId());
// clean up pending subscription tasks
Set<Integer> pendingSubscriptions = apiMgtDAO.getPendingSubscriptionsByApplicationId(applicationId);
for (int subscription : pendingSubscriptions) {
try {
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscription);
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for subscription " + subscription);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending subscription approval task: " + subscription);
}
}
// cleanup pending application registration tasks
String productionKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
String sandboxKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (WorkflowStatus.CREATED.toString().equals(productionKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
createProductionRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for production key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending production key approval task of " + applicationId);
}
}
if (WorkflowStatus.CREATED.toString().equals(sandboxKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
createSandboxRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for sandbox key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending sandbox key approval task of " + applicationId);
}
}
if (workflowExtRef != null) {
try {
createApplicationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending application approval task of " + applicationId);
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowDTO.setExternalWorkflowReference(removeApplicationWFExecutor.generateUUID());
removeApplicationWFExecutor.execute(workflowDTO);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION + " " +
"for applicationID " + application.getId();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
String logMessage = "Application Name: " + application.getName() + " successfully removed";
log.debug(logMessage);
}
}
/**
 * This method is implemented specifically for the REST API by moving the application and data access
 * logic out of the host object layer. The requested scopes are passed to this method as tokenScope,
 * and all scope-related logic is handled here, so the host object only needs to pass the required
 * nine parameters.
 */
@Override
public Map<String, Object> requestApprovalForApplicationRegistration(String userId, String applicationName,
String tokenType, String callbackUrl,
String[] allowedDomains, String validityTime,
String tokenScope, String groupingId,
String jsonString
)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = MultitenantConstants.INVALID_TENANT_ID;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Unable to retrieve the tenant information of the current user.", e);
}
//checking for authorized scopes
Set<Scope> scopeSet = new LinkedHashSet<Scope>();
List<Scope> authorizedScopes = new ArrayList<Scope>();
String authScopeString;
if (tokenScope != null && tokenScope.length() != 0 &&
!APIConstants.OAUTH2_DEFAULT_SCOPE.equals(tokenScope)) {
scopeSet.addAll(getScopesByScopeKeys(tokenScope, tenantId));
authorizedScopes = getAllowedScopesForUserApplication(userId, scopeSet);
}
if (!authorizedScopes.isEmpty()) {
Set<Scope> authorizedScopeSet = new HashSet<Scope>(authorizedScopes);
StringBuilder scopeBuilder = new StringBuilder();
for (Scope scope : authorizedScopeSet) {
scopeBuilder.append(scope.getKey()).append(' ');
}
authScopeString = scopeBuilder.toString();
} else {
authScopeString = APIConstants.OAUTH2_DEFAULT_SCOPE;
}
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
// initiate WorkflowExecutor
WorkflowExecutor appRegistrationWorkflow = null;
// initiate ApplicationRegistrationWorkflowDTO
ApplicationRegistrationWorkflowDTO appRegWFDto = null;
ApplicationKeysDTO appKeysDto = new ApplicationKeysDTO();
// get APIM application by Application Name and userId.
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(userId);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to generate tokens for application owned by: " + userId);
}
// if it is a PRODUCTION application.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
// initiate the workflow type. By default the simple workflow is executed.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) { // if it is a SANDBOX application.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
} else {
throw new APIManagementException("Invalid Token Type '" + tokenType + "' requested.");
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(callbackUrl)) {
callbackUrl = null;
}
String applicationTokenType = application.getTokenType();
if (StringUtils.isEmpty(application.getTokenType())) {
applicationTokenType = APIConstants.DEFAULT_TOKEN_TYPE;
}
// Build key manager instance and create oAuthAppRequest by jsonString.
OAuthAppRequest request =
ApplicationUtils.createOauthAppRequest(applicationName, null,
callbackUrl, authScopeString, jsonString, applicationTokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.VALIDITY_PERIOD, validityTime);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_CALLBACK_URL, callbackUrl);
// Setting request values in WorkflowDTO - In future we should keep
// Application/OAuthApplication related
// information in the respective entities not in the workflowDTO.
appRegWFDto.setStatus(WorkflowStatus.CREATED);
appRegWFDto.setCreatedTime(System.currentTimeMillis());
appRegWFDto.setTenantDomain(tenantDomain);
appRegWFDto.setTenantId(tenantId);
appRegWFDto.setExternalWorkflowReference(appRegistrationWorkflow.generateUUID());
appRegWFDto.setWorkflowReference(appRegWFDto.getExternalWorkflowReference());
appRegWFDto.setApplication(application);
request.setMappingId(appRegWFDto.getWorkflowReference());
if (!application.getSubscriber().getName().equals(userId)) {
appRegWFDto.setUserName(application.getSubscriber().getName());
} else {
appRegWFDto.setUserName(userId);
}
appRegWFDto.setCallbackUrl(appRegistrationWorkflow.getCallbackURL());
appRegWFDto.setAppInfoDTO(request);
appRegWFDto.setDomainList(allowedDomains);
appRegWFDto.setKeyDetails(appKeysDto);
appRegistrationWorkflow.execute(appRegWFDto);
Map<String, Object> keyDetails = new HashMap<String, Object>();
keyDetails.put("keyState", appRegWFDto.getStatus().toString());
OAuthApplicationInfo applicationInfo = appRegWFDto.getApplicationInfo();
if (applicationInfo != null) {
keyDetails.put("consumerKey", applicationInfo.getClientId());
keyDetails.put("consumerSecret", applicationInfo.getClientSecret());
keyDetails.put("appDetails", applicationInfo.getJsonString());
}
// There can be instances where generating the Application Token is not required.
// In those cases the token info will be empty.
AccessTokenInfo tokenInfo = appRegWFDto.getAccessTokenInfo();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", tokenInfo.getValidityPeriod());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
keyDetails.put("tokenScope", tokenInfo.getScopes());
}
JSONObject appLogObject = new JSONObject();
appLogObject.put("Generated keys for application", application.getName());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyDetails;
} catch (WorkflowException e) {
log.error("Could not execute Workflow", e);
throw new APIManagementException(e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
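/**
 * Filters the requested scopes down to those the user is allowed to request, based on the roles
 * assigned to the user and the roles bound to each scope. Scopes with no role bindings are skipped.
 */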
private static List<Scope> getAllowedScopesForUserApplication(String username,
Set<Scope> reqScopeSet) {
String[] userRoles = null;
org.wso2.carbon.user.api.UserStoreManager userStoreManager = null;
List<Scope> authorizedScopes = new ArrayList<Scope>();
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
userRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(username));
} catch (org.wso2.carbon.user.api.UserStoreException e) {
// Log and return since we do not want to stop issuing the token in
// case of scope validation failures.
log.error("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
List<String> userRoleList;
if (userRoles != null) {
userRoleList = new ArrayList<String>(Arrays.asList(userRoles));
} else {
userRoleList = Collections.emptyList();
}
//Iterate the requested scopes list.
for (Scope scope : reqScopeSet) {
//Get the set of roles associated with the requested scope.
String roles = scope.getRoles();
//If the scope has been defined in the context of the App and if roles have been defined for the scope
if (roles != null && roles.length() != 0) {
List<String> roleList =
new ArrayList<String>(Arrays.asList(roles.replaceAll(" ", EMPTY_STRING).split(",")));
//Check if user has at least one of the roles associated with the scope
roleList.retainAll(userRoleList);
if (!roleList.isEmpty()) {
authorizedScopes.add(scope);
}
}
}
return authorizedScopes;
}
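/**
 * Completes a pending application key registration: if the registration workflow has been approved,
 * keys are generated for the application and the consumer key/secret and token details are returned.
 *
 * @return a map of key details, or null if no workflow reference exists or the registration has not
 * been approved
 */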
@Override
public Map<String, String> completeApplicationRegistration(String userId, String applicationName, String tokenType,
String tokenScope, String groupingId)
throws APIManagementException {
Application application = apiMgtDAO.getApplicationByName(applicationName, userId, groupingId);
String status = apiMgtDAO.getRegistrationApprovalState(application.getId(), tokenType);
Map<String, String> keyDetails = null;
if (!application.getSubscriber().getName().equals(userId)) {
userId = application.getSubscriber().getName();
}
String workflowReference = apiMgtDAO.getWorkflowReference(applicationName, userId);
if (workflowReference != null) {
WorkflowDTO workflowDTO = null;
// Creating workflowDTO for the correct key type.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
}
if (workflowDTO != null) {
// Set the workflow reference in the workflow dto and the populate method will fill in other details
// using the persisted request.
ApplicationRegistrationWorkflowDTO registrationWorkflowDTO = (ApplicationRegistrationWorkflowDTO)
workflowDTO;
registrationWorkflowDTO.setExternalWorkflowReference(workflowReference);
if (APIConstants.AppRegistrationStatus.REGISTRATION_APPROVED.equals(status)) {
apiMgtDAO.populateAppRegistrationWorkflowDTO(registrationWorkflowDTO);
try {
AbstractApplicationRegistrationWorkflowExecutor.dogenerateKeysForApplication
(registrationWorkflowDTO);
AccessTokenInfo tokenInfo = registrationWorkflowDTO.getAccessTokenInfo();
OAuthApplicationInfo oauthApp = registrationWorkflowDTO.getApplicationInfo();
keyDetails = new HashMap<String, String>();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oauthApp.getClientId());
keyDetails.put("consumerSecret", oauthApp.getClientSecret());
keyDetails.put("appDetails", oauthApp.getJsonString());
} catch (APIManagementException e) {
APIUtil.handleException("Error occurred while Creating Keys.", e);
}
}
}
}
return keyDetails;
}
/**
* Returns the application with the given name that belongs to the given subscriber, along with its key details.
*
* @param userId APIM subscriber user ID.
* @param applicationName APIM application name.
* @param groupingId the group ID to which the application belongs.
* @return Application the matching application, or null if no such application exists.
* @throws APIManagementException
*/
@Override
public Application getApplicationsByName(String userId, String applicationName, String groupingId) throws
APIManagementException {
Application application = apiMgtDAO.getApplicationByName(applicationName, userId, groupingId);
if (application != null) {
checkAppAttributes(application, userId);
}
application = apiMgtDAO.getApplicationWithOAuthApps(applicationName, userId, groupingId);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return application;
}
/**
* Returns the application corresponding to the given application ID.
* @param id ID of the application.
* @return the Application that corresponds to the given ID.
* @throws APIManagementException
*/
@Override
public Application getApplicationById(int id) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(id);
String userId = application.getSubscriber().getName();
checkAppAttributes(application, userId);
return apiMgtDAO.getApplicationById(id);
}
/**
* Gets the status of the application creation process for the given application ID.
*
* @param applicationId ID of the application.
* @return String status of the application registration.
* @throws APIManagementException
*/
@Override
public String getApplicationStatusById(int applicationId) throws APIManagementException {
return apiMgtDAO.getApplicationStatusById(applicationId);
}
@Override
public boolean isApplicationTokenExists(String accessToken) throws APIManagementException {
return apiMgtDAO.isAccessTokenExists(accessToken);
}
@Override
public Set<SubscribedAPI> getSubscribedIdentifiers(Subscriber subscriber, APIIdentifier identifier, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPISet = new HashSet<>();
Set<SubscribedAPI> subscribedAPIs = getSubscribedAPIs(subscriber, groupingId);
for (SubscribedAPI api : subscribedAPIs) {
if (api.getApiId().equals(identifier)) {
Set<APIKey> keys = getApplicationKeys(api.getApplication().getId());
for (APIKey key : keys) {
api.addKey(key);
}
subscribedAPISet.add(api);
}
}
return subscribedAPISet;
}
/**
* Returns the names of the tiers that are denied for the current user.
*
* @return Set<String> names of the denied tiers.
*/
@Override
public Set<String> getDeniedTiers() throws APIManagementException {
// '0' is passed as argument whenever tenant id of logged in user is needed
return getDeniedTiers(0);
}
/**
* Returns the names of the tiers that are denied for the current user.
* @param apiProviderTenantId tenant id of API provider
* @return Set<String> names of the denied tiers.
*/
@Override
public Set<String> getDeniedTiers(int apiProviderTenantId) throws APIManagementException {
Set<String> deniedTiers = new HashSet<String>();
String[] currentUserRoles;
if (apiProviderTenantId == 0) {
apiProviderTenantId = tenantId;
}
try {
if (apiProviderTenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry)
.getUserName());
Set<TierPermissionDTO> tierPermissions;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissions = apiMgtDAO.getThrottleTierPermissions(apiProviderTenantId);
} else {
tierPermissions = apiMgtDAO.getTierPermissions(apiProviderTenantId);
}
for (TierPermissionDTO tierPermission : tierPermissions) {
String type = tierPermission.getPermissionType();
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(type)) {
/* Current User is not allowed for this Tier*/
if (currentRolesList.isEmpty()) {
deniedTiers.add(tierPermission.getTierName());
}
} else {
/* Current User is denied for this Tier*/
if (currentRolesList.size() > 0) {
deniedTiers.add(tierPermission.getTierName());
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return deniedTiers;
}
@Override
public Set<TierPermission> getTierPermissions() throws APIManagementException {
Set<TierPermission> tierPermissions = new HashSet<TierPermission>();
if (tenantId != 0) {
Set<TierPermissionDTO> tierPermissionDtos;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissionDtos = apiMgtDAO.getThrottleTierPermissions(tenantId);
} else {
tierPermissionDtos = apiMgtDAO.getTierPermissions(tenantId);
}
for (TierPermissionDTO tierDto : tierPermissionDtos) {
TierPermission tierPermission = new TierPermission(tierDto.getTierName());
tierPermission.setRoles(tierDto.getRoles());
tierPermission.setPermissionType(tierDto.getPermissionType());
tierPermissions.add(tierPermission);
}
}
return tierPermissions;
}
/**
* Check whether the given tier is denied for the current user.
*
* @param tierName name of the tier.
* @return true if the tier is denied for the user, false otherwise.
* @throws APIManagementException if failed to get the tiers
*/
@Override
public boolean isTierDeneid(String tierName) throws APIManagementException {
String[] currentUserRoles;
try {
if (tenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry).getUserName());
TierPermissionDTO tierPermission;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermission = apiMgtDAO.getThrottleTierPermission(tierName, tenantId);
} else {
tierPermission = apiMgtDAO.getTierPermission(tierName, tenantId);
}
if (tierPermission == null) {
return false;
} else {
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(tierPermission.getPermissionType())) {
if (currentRolesList.isEmpty()) {
return true;
}
} else {
if (currentRolesList.size() > 0) {
return true;
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return false;
}
private boolean isTenantDomainNotMatching(String tenantDomain) {
if (this.tenantDomain != null) {
return !(this.tenantDomain.equals(tenantDomain));
}
return true;
}
@Override
public Set<API> searchAPI(String searchTerm, String searchType, String tenantDomain)
throws APIManagementException {
return null;
}
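/**
* Returns the scopes associated with the given set of subscribed APIs.
*
* @param identifiers identifiers of the subscribed APIs.
* @return Set<Scope> scopes attached to the given APIs.
* @throws APIManagementException
*/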
public Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers)
throws APIManagementException {
return apiMgtDAO.getScopesBySubscribedAPIs(identifiers);
}
public String getScopesByToken(String accessToken) throws APIManagementException {
return null;
}
public Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId)
throws APIManagementException {
return apiMgtDAO.getScopesByScopeKeys(scopeKeys, tenantId);
}
@Override
public String getGroupId(int appId) throws APIManagementException {
return apiMgtDAO.getGroupId(appId);
}
@Override
public String[] getGroupIds(String response) throws APIManagementException {
String groupingExtractorClass = APIUtil.getGroupingExtractorImplementation();
if (groupingExtractorClass != null) {
try {
LoginPostExecutor groupingExtractor = (LoginPostExecutor) APIUtil.getClassForName
(groupingExtractorClass).newInstance();
// If multi group app sharing is enabled, use the NewPostLoginExecutor to extract a list of group IDs;
// otherwise fall back to the single group ID extractor (2.1.0 vs 2.2.0 behaviour).
if (APIUtil.isMultiGroupAppSharingEnabled()) {
NewPostLoginExecutor newGroupIdListExtractor = (NewPostLoginExecutor) groupingExtractor;
return newGroupIdListExtractor.getGroupingIdentifierList(response);
} else {
String groupId = groupingExtractor.getGroupingIdentifiers(response);
return new String[] {groupId};
}
} catch (ClassNotFoundException e) {
String msg = groupingExtractorClass + " class could not be found at runtime";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (ClassCastException e) {
String msg = "Cannot cast " + groupingExtractorClass + " NewPostLoginExecutor";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (IllegalAccessException e) {
String msg = "Error occurred while invocation of getGroupingIdentifier method";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (InstantiationException e) {
String msg = "Error occurred while instantiating " + groupingExtractorClass + " class";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
return null;
}
/**
* Returns all applications associated with given subscriber, groupingId and search criteria.
*
* @param subscriber Subscriber
* @param groupingId The groupId to which the applications must belong.
* @param start The starting index of the result set.
* @param offset The offset.
* @param search The search string.
* @param sortColumn The sort column.
* @param sortOrder The sort order.
* @return Application[] The Applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplicationsWithPagination(Subscriber subscriber, String groupingId, int start, int offset,
String search, String sortColumn, String sortOrder)
throws APIManagementException {
return apiMgtDAO.getApplicationsWithPagination(subscriber, groupingId, start, offset,
search, sortColumn, sortOrder);
}
/**
* Returns all applications associated with given subscriber and groupingId.
*
* @param subscriber The subscriber.
* @param groupingId The groupId to which the applications must belong.
* @return Application[] Array of applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplications(Subscriber subscriber, String groupingId)
throws APIManagementException {
Application[] applications = apiMgtDAO.getApplications(subscriber, groupingId);
for (Application application : applications) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return applications;
}
/**
* Returns all API keys associated with given application id.
*
* @param applicationId The id of the application.
* @return Set<APIKey> Set of API keys of the application.
* @throws APIManagementException
*/
protected Set<APIKey> getApplicationKeys(int applicationId) throws APIManagementException {
Set<APIKey> apiKeys = new HashSet<APIKey>();
APIKey productionKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
if (productionKey != null) {
apiKeys.add(productionKey);
} else {
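// No active production key was found; fall back to the key status recorded in the database
// (for example a pending or rejected key generation request).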
productionKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_PRODUCTION, applicationId);
if (productionKey != null) {
productionKey.setType(APIConstants.API_KEY_TYPE_PRODUCTION);
apiKeys.add(productionKey);
}
}
APIKey sandboxKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (sandboxKey != null) {
apiKeys.add(sandboxKey);
} else {
sandboxKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_SANDBOX, applicationId);
if (sandboxKey != null) {
sandboxKey.setType(APIConstants.API_KEY_TYPE_SANDBOX);
apiKeys.add(sandboxKey);
}
}
return apiKeys;
}
/**
* Returns the key associated with given application id and key type.
*
* @param applicationId Id of the Application.
* @param keyType The type of key.
* @return APIKey The key of the application.
* @throws APIManagementException
*/
protected APIKey getApplicationKey(int applicationId, String keyType) throws APIManagementException {
String consumerKey = apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(String.valueOf(applicationId), keyType);
if (StringUtils.isNotEmpty(consumerKey)) {
String consumerKeyStatus = apiMgtDAO.getKeyStatusOfApplication(keyType, applicationId).getState();
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
OAuthApplicationInfo oAuthApplicationInfo = keyManager.retrieveApplication(consumerKey);
AccessTokenInfo tokenInfo = keyManager.getAccessTokenByConsumerKey(consumerKey);
APIKey apiKey = new APIKey();
apiKey.setConsumerKey(consumerKey);
apiKey.setType(keyType);
apiKey.setState(consumerKeyStatus);
if (oAuthApplicationInfo != null) {
apiKey.setConsumerSecret(oAuthApplicationInfo.getClientSecret());
apiKey.setCallbackUrl(oAuthApplicationInfo.getCallBackURL());
if (oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES) != null) {
apiKey.setGrantTypes(oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES).toString());
}
}
if (tokenInfo != null) {
apiKey.setAccessToken(tokenInfo.getAccessToken());
apiKey.setValidityPeriod(tokenInfo.getValidityPeriod());
apiKey.setTokenScope(getScopeString(tokenInfo.getScopes()));
} else {
if (log.isDebugEnabled()) {
log.debug("Access token does not exist for Consumer Key: " + consumerKey);
}
}
return apiKey;
}
if (log.isDebugEnabled()) {
log.debug("Consumer key does not exist for Application Id: " + applicationId + " Key Type: " + keyType);
}
return null;
}
/**
* Returns a single string containing the provided array of scopes.
*
* @param scopes The array of scopes.
* @return String Single string containing the provided array of scopes.
*/
private String getScopeString(String[] scopes) {
return StringUtils.join(scopes, " ");
}
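/**
* Returns the applications associated with the given subscriber and groupingId as light weight objects,
* without populating their key details.
*
* @param subscriber The subscriber.
* @param groupingId The groupId to which the applications must belong.
* @return Application[] Array of light weight applications.
* @throws APIManagementException
*/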
@Override
public Application[] getLightWeightApplications(Subscriber subscriber, String groupingId) throws
APIManagementException {
return apiMgtDAO.getLightWeightApplications(subscriber, groupingId);
}
/**
* @param userId Subscriber user name.
* @param applicationName Name of the application.
* @param tokenType Token type (PRODUCTION | SANDBOX).
* @param callbackUrl Callback URL of the OAuth client.
* @param allowedDomains Allowed domains for the token.
* @param validityTime Token validity period.
* @param tokenScope Scopes for the requested tokens.
* @param groupingId Group ID of the application.
* @param jsonString Additional OAuth application properties (for example grant types) as a JSON string.
* @return OAuthApplicationInfo the updated OAuth application details.
* @throws APIManagementException
*/
@Override
public OAuthApplicationInfo updateAuthClient(String userId, String applicationName,
String tokenType,
String callbackUrl, String[] allowedDomains,
String validityTime,
String tokenScope,
String groupingId,
String jsonString) throws APIManagementException {
boolean tenantFlowStarted = false;
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
tenantFlowStarted = true;
}
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
final String subscriberName = application.getSubscriber().getName();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = subscriberName.equalsIgnoreCase(userId);
} else {
isUserAppOwner = subscriberName.equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + userId + ", attempted to update OAuth application " +
"owned by: " + subscriberName);
}
//Create OauthAppRequest object by passing json String.
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, null, callbackUrl,
tokenScope, jsonString, application.getTokenType());
oauthAppRequest.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
String consumerKey = apiMgtDAO.getConsumerKeyForApplicationKeyType(applicationName, userId, tokenType,
groupingId);
oauthAppRequest.getOAuthApplicationInfo().setClientId(consumerKey);
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//call update method.
OAuthApplicationInfo updatedAppInfo = keyManager.updateApplication(oauthAppRequest);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, updatedAppInfo.getClientName());
appLogObject.put("Updated Oauth app with Call back URL", callbackUrl);
appLogObject.put("Updated Oauth app with grant types", jsonString);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return updatedAppInfo;
} finally {
if (tenantFlowStarted) {
endTenantFlow();
}
}
}
/**
* This method deletes the OAuth application that corresponds to the given consumer key.
*
* @param consumerKey consumer key of the OAuth application.
* @throws APIManagementException
*/
@Override
public void deleteOAuthApplication(String consumerKey) throws APIManagementException {
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//delete oAuthApplication by calling key manager implementation
keyManager.deleteApplication(consumerKey);
Map<String, String> applicationIdAndTokenTypeMap =
apiMgtDAO.getApplicationIdAndTokenTypeByConsumerKey(consumerKey);
if (applicationIdAndTokenTypeMap != null) {
String applicationId = applicationIdAndTokenTypeMap.get("application_id");
String tokenType = applicationIdAndTokenTypeMap.get("token_type");
if (applicationId != null && tokenType != null) {
apiMgtDAO.deleteApplicationKeyMappingByConsumerKey(consumerKey);
apiMgtDAO.deleteApplicationRegistration(applicationId, tokenType);
}
}
}
@Override
public Application[] getApplicationsByOwner(String userId) throws APIManagementException {
return apiMgtDAO.getApplicationsByOwner(userId);
}
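/**
* Updates the owner of the given application and assigns the application related roles to the new owner.
*
* @param userId user name of the new application owner.
* @param application the application whose ownership should be transferred.
* @return true if the application owner was updated successfully.
* @throws APIManagementException if updating the roles or the application owner fails.
*/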
@Override
public boolean updateApplicationOwner(String userId, Application application) throws APIManagementException {
boolean isAppUpdated = false;
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
UserStoreManager userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
String oldUserName = application.getSubscriber().getName();
String[] oldUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(oldUserName));
String[] newUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(userId));
List<String> roleList = new ArrayList<String>();
roleList.addAll(Arrays.asList(newUserRoles));
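// Carry over any application specific roles (roles containing the application name) from the previous
// owner to the new owner's role list.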
for (String role : oldUserRoles) {
if (role.contains(application.getName())) {
roleList.add(role);
}
}
String[] roleArr = roleList.toArray(new String[roleList.size()]);
APIManagerConfiguration config = getAPIManagerConfiguration();
String serverURL = config.getFirstProperty(APIConstants.AUTH_MANAGER_URL) + "UserAdmin";
String adminUsername = config.getFirstProperty(APIConstants.AUTH_MANAGER_USERNAME);
String adminPassword = config.getFirstProperty(APIConstants.AUTH_MANAGER_PASSWORD);
UserAdminStub userAdminStub = new UserAdminStub(serverURL);
CarbonUtils.setBasicAccessSecurityHeaders(adminUsername, adminPassword, userAdminStub._getServiceClient());
userAdminStub.updateRolesOfUser(userId, roleArr);
isAppUpdated = true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
} catch (RemoteException e) {
handleException("Server couldn't establish connection with auth manager ", e);
} catch (UserAdminUserAdminException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
if (isAppUpdated) {
isAppUpdated = apiMgtDAO.updateApplicationOwner(userId, application);
}
//TODO: update the OAuth application once the OAuth component supports updating the owner
return isAppUpdated;
}
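/**
* Resumes and completes a pending workflow task.
*
* @param args expects {workflowReference, status (APPROVED or REJECTED), description (optional)} as a String array.
* @return JSONObject containing 'error', 'statusCode' and 'message' entries describing the outcome.
*/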
public JSONObject resumeWorkflow(Object[] args) {
JSONObject row = new JSONObject();
if (args != null && APIUtil.isStringArray(args)) {
String workflowReference = (String) args[0];
String status = (String) args[1];
String description = null;
if (args.length > 2 && args[2] != null) {
description = (String) args[2];
}
boolean isTenantFlowStarted = false;
try {
// if (workflowReference != null) {
WorkflowDTO workflowDTO = apiMgtDAO.retrieveWorkflow(workflowReference);
if (workflowDTO == null) {
log.error("Could not find workflow for reference " + workflowReference);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", "Could not find workflow for reference " + workflowReference);
return row;
}
String tenantDomain = workflowDTO.getTenantDomain();
if (tenantDomain != null && !org.wso2.carbon.utils.multitenancy.MultitenantConstants
.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
workflowDTO.setWorkflowDescription(description);
workflowDTO.setStatus(WorkflowStatus.valueOf(status));
String workflowType = workflowDTO.getWorkflowType();
WorkflowExecutor workflowExecutor;
try {
workflowExecutor = getWorkflowExecutor(workflowType);
workflowExecutor.complete(workflowDTO);
} catch (WorkflowException e) {
throw new APIManagementException(e);
}
row.put("error", Boolean.FALSE);
row.put("statusCode", 200);
row.put("message", "Invoked workflow completion successfully.");
// }
} catch (IllegalArgumentException e) {
String msg = "Illegal argument provided. Valid values for status are APPROVED and REJECTED.";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg);
} catch (APIManagementException e) {
String msg = "Error while resuming the workflow. ";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg + e.getMessage());
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
return row;
}
protected void endTenantFlow() {
PrivilegedCarbonContext.endTenantFlow();
}
protected boolean startTenantFlowForTenantDomain(String tenantDomain) {
boolean isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
return isTenantFlowStarted;
}
/**
* Returns a workflow executor
*
* @param workflowType Workflow executor type
* @return WorkflowExecutor of given type
* @throws WorkflowException if an error occurred while getting WorkflowExecutor
*/
protected WorkflowExecutor getWorkflowExecutor(String workflowType) throws WorkflowException {
return WorkflowExecutorFactory.getInstance().getWorkflowExecutor(workflowType);
}
@Override
public boolean isMonetizationEnabled(String tenantDomain) throws APIManagementException {
JSONObject apiTenantConfig = null;
try {
String content = apimRegistryService.getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION);
if (content != null) {
JSONParser parser = new JSONParser();
apiTenantConfig = (JSONObject) parser.parse(content);
}
} catch (UserStoreException e) {
handleException("UserStoreException thrown when getting API tenant config from registry", e);
} catch (RegistryException e) {
handleException("RegistryException thrown when getting API tenant config from registry", e);
} catch (ParseException e) {
handleException("ParseException thrown when passing API tenant config from registry", e);
}
return getTenantConfigValue(tenantDomain, apiTenantConfig, APIConstants.API_TENANT_CONF_ENABLE_MONITZATION_KEY);
}
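/**
* Reads a boolean value from the given tenant configuration.
*
* @param tenantDomain tenant domain the configuration belongs to.
* @param apiTenantConfig parsed tenant configuration, may be null when no configuration was found.
* @param configKey key of the configuration entry to read.
* @return the boolean value of the entry, or false when no tenant configuration exists.
* @throws APIManagementException if the key does not exist in the given tenant configuration.
*/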
private boolean getTenantConfigValue(String tenantDomain, JSONObject apiTenantConfig, String configKey) throws APIManagementException {
if (apiTenantConfig != null) {
Object value = apiTenantConfig.get(configKey);
if (value != null) {
return Boolean.parseBoolean(value.toString());
}
else {
throw new APIManagementException(configKey + " config does not exist for tenant " + tenantDomain);
}
}
return false;
}
/**
* Builds the solr query fragment that restricts search results to the current user's role list.
*
* @return the store view roles query containing the user's role list.
* @throws APIManagementException API Management Exception.
*/
private String getUserRoleListQuery() throws APIManagementException {
StringBuilder rolesQuery = new StringBuilder();
rolesQuery.append('(');
rolesQuery.append(APIConstants.NULL_USER_ROLE_LIST);
String[] userRoles = APIUtil.getListOfRoles((userNameWithoutChange != null)? userNameWithoutChange: username);
if (userRoles != null) {
for (String userRole : userRoles) {
rolesQuery.append(" OR ");
rolesQuery.append(ClientUtils.escapeQueryChars(APIUtil.sanitizeUserRole(userRole.toLowerCase())));
}
}
rolesQuery.append(")");
if(log.isDebugEnabled()) {
log.debug("User role list solr query " + APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString());
}
return APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString();
}
/**
* To get the current user's role list.
*
* @return user role list.
* @throws APIManagementException API Management Exception.
*/
private List<String> getUserRoleList() throws APIManagementException {
List<String> userRoleList;
if (userNameWithoutChange == null) {
userRoleList = new ArrayList<String>() {{
add(APIConstants.NULL_USER_ROLE_LIST);
}};
} else {
userRoleList = new ArrayList<String>(Arrays.asList(APIUtil.getListOfRoles(userNameWithoutChange)));
}
return userRoleList;
}
@Override
protected String getSearchQuery(String searchQuery) throws APIManagementException {
if (!isAccessControlRestrictionEnabled || ( userNameWithoutChange != null &&
APIUtil.hasPermission(userNameWithoutChange, APIConstants.Permissions
.APIM_ADMIN))) {
return searchQuery;
}
String criteria = getUserRoleListQuery();
if (searchQuery != null && !searchQuery.trim().isEmpty()) {
criteria = criteria + "&" + searchQuery;
}
return criteria;
}
@Override
public String getWSDLDocument(String username, String tenantDomain, String resourceUrl,
Map environmentDetails, Map apiDetails) throws APIManagementException {
if (username == null) {
username = APIConstants.END_USER_ANONYMOUS;
}
if (tenantDomain == null) {
tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
Map<String, Object> docResourceMap = APIUtil.getDocument(username, resourceUrl, tenantDomain);
String wsdlContent = "";
if (log.isDebugEnabled()) {
log.debug("WSDL document resource availability: " + docResourceMap.isEmpty());
}
if (!docResourceMap.isEmpty()) {
try {
ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
IOUtils.copy((InputStream) docResourceMap.get("Data"), arrayOutputStream);
String apiName = (String) apiDetails.get(API_NAME);
String apiVersion = (String) apiDetails.get(API_VERSION);
String apiProvider = (String) apiDetails.get(API_PROVIDER);
String environmentName = (String) environmentDetails.get(ENVIRONMENT_NAME);
String environmentType = (String) environmentDetails.get(ENVIRONMENT_TYPE);
if (log.isDebugEnabled()) {
log.debug("Published SOAP api gateway environment name: " + environmentName + " environment type: "
+ environmentType);
}
byte[] updatedWSDLContent = this.getUpdatedWSDLByEnvironment(resourceUrl,
arrayOutputStream.toByteArray(), environmentName, environmentType, apiName, apiVersion, apiProvider);
wsdlContent = new String(updatedWSDLContent);
} catch (IOException e) {
handleException("Error occurred while copying wsdl content into byte array stream for resource: "
+ resourceUrl, e);
}
} else {
handleException("No wsdl resource found for resource path: " + resourceUrl);
}
JSONObject data = new JSONObject();
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_DATA, wsdlContent);
if (log.isDebugEnabled()) {
log.debug("Updated wsdl content details for wsdl resource: " + docResourceMap.get("name") + " is " +
data.toJSONString());
}
return data.toJSONString();
}
/**
* To check authorization of the API against current logged in user. If the user is not authorized an exception
* will be thrown.
*
* @param identifier API identifier
* @throws APIManagementException APIManagementException
*/
protected void checkAccessControlPermission(APIIdentifier identifier) throws APIManagementException {
if (identifier == null || !isAccessControlRestrictionEnabled) {
if (!isAccessControlRestrictionEnabled && log.isDebugEnabled() && identifier != null) {
log.debug(
"Publisher access control restriction is not enabled. Hence the API " + identifier.getApiName()
+ " should not be checked for further permission. Registry permission check "
+ "is sufficient");
}
return;
}
String apiPath = APIUtil.getAPIPath(identifier);
Registry registry;
try {
// Need user name with tenant domain to get correct domain name from
// MultitenantUtils.getTenantDomain(username)
String userNameWithTenantDomain = (userNameWithoutChange != null) ? userNameWithoutChange : username;
String apiTenantDomain = getTenantDomain(identifier);
int apiTenantId = getTenantManager().getTenantId(apiTenantDomain);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(apiTenantDomain)) {
APIUtil.loadTenantRegistry(apiTenantId);
}
if (this.tenantDomain == null || !this.tenantDomain.equals(apiTenantDomain)) { //cross tenant scenario
registry = getRegistryService().getGovernanceUserRegistry(
getTenantAwareUsername(APIUtil.replaceEmailDomainBack(identifier.getProviderName())),
apiTenantId);
} else {
registry = this.registry;
}
Resource apiResource = registry.get(apiPath);
String accessControlProperty = apiResource.getProperty(APIConstants.ACCESS_CONTROL);
if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty
.equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) {
if (log.isDebugEnabled()) {
log.debug("API in the path " + apiPath + " does not have any access control restriction");
}
return;
}
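// Users with the APIM admin permission can view any API regardless of the store visibility restriction.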
if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) {
return;
}
String storeVisibilityRoles = apiResource.getProperty(APIConstants.STORE_VIEW_ROLES);
if (storeVisibilityRoles != null && !storeVisibilityRoles.trim().isEmpty()) {
String[] storeVisibilityRoleList = storeVisibilityRoles.split(",");
if (log.isDebugEnabled()) {
log.debug("API has restricted access to users with the roles : " + Arrays
.toString(storeVisibilityRoleList));
}
String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain);
if (log.isDebugEnabled()) {
log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
}
for (String role : storeVisibilityRoleList) {
role = role.trim();
if (role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) || APIUtil
.compareRoleList(userRoleList, role)) {
return;
}
}
if (log.isDebugEnabled()) {
log.debug("API " + identifier + " cannot be accessed by user '" + username + "'. It "
+ "has a store visibility restriction");
}
throw new APIManagementException(
APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view the API " + identifier);
}
} catch (RegistryException e) {
throw new APIManagementException(
"Registry Exception while trying to check the store visibility restriction of API " + identifier
.getApiName(), e);
} catch (org.wso2.carbon.user.api.UserStoreException e) {
String msg = "Failed to get API from : " + apiPath;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
/**
* This method is used to get the updated wsdl with the endpoints of the environment in which the api is published
*
* @param wsdlResourcePath registry resource path to the wsdl
* @param wsdlContent wsdl resource content as byte array
* @param environmentName gateway environment name
* @param environmentType gateway environment type
* @param apiName name of the api
* @param apiVersion version of the api
* @param apiProvider provider of the api
* @return updated wsdl content with environment endpoints
* @throws APIManagementException
*/
private byte[] getUpdatedWSDLByEnvironment(String wsdlResourcePath, byte[] wsdlContent, String environmentName,
String environmentType, String apiName, String apiVersion, String apiProvider) throws APIManagementException {
APIMWSDLReader apimwsdlReader = new APIMWSDLReader(wsdlResourcePath);
Definition definition = apimwsdlReader.getWSDLDefinitionFromByteContent(wsdlContent, false);
byte[] updatedWSDLContent = null;
boolean isTenantFlowStarted = false;
try {
String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(apiProvider));
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
int tenantId;
UserRegistry registry;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
APIUtil.loadTenantRegistry(tenantId);
registry = registryService.getGovernanceSystemRegistry(tenantId);
API api = null;
if (!StringUtils.isEmpty(apiName) && !StringUtils.isEmpty(apiVersion)) {
APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(apiProvider), apiName, apiVersion);
if (log.isDebugEnabled()) {
log.debug("Api identifier for the soap api artifact: " + apiIdentifier + "for api name: "
+ apiName + ", version: " + apiVersion);
}
GenericArtifact apiArtifact = APIUtil.getAPIArtifact(apiIdentifier, registry);
api = APIUtil.getAPI(apiArtifact);
if (log.isDebugEnabled()) {
if (api != null) {
log.debug(
"Api context for the artifact with id:" + api.getId() + " is " + api.getContext());
} else {
log.debug("Api does not exist for api name: " + apiIdentifier.getApiName());
}
}
} else {
handleException("Artifact does not exist in the registry for api name: " + apiName +
" and version: " + apiVersion);
}
if (api != null) {
try {
apimwsdlReader.setServiceDefinition(definition, api, environmentName, environmentType);
if (log.isDebugEnabled()) {
log.debug("Soap api with context:" + api.getContext() + " in " + environmentName
+ " with environment type" + environmentType);
}
updatedWSDLContent = apimwsdlReader.getWSDL(definition);
} catch (APIManagementException e) {
handleException("Error occurred while processing the wsdl for api: " + api.getId());
}
} else {
handleException("Error while getting API object for wsdl artifact");
}
} catch (UserStoreException e) {
handleException("Error while reading tenant information", e);
} catch (RegistryException e) {
handleException("Error when create registry instance", e);
}
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return updatedWSDLContent;
}
/**
* This method is used to get the keys of the custom application attributes configured by the user
*
* @param userId user name of the logged in user
* @return JSONArray of JSONObjects, each containing the key of an attribute
* @throws APIManagementException
*/
public JSONArray getAppAttributesFromConfig(String userId) throws APIManagementException {
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = 0;
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
JSONArray applicationAttributes = null;
JSONObject applicationConfig = APIUtil.getAppAttributeKeysFromRegistry(tenantId);
try {
if (applicationConfig != null) {
applicationAttributes = (JSONArray) applicationConfig.get(APIConstants.ApplicationAttributes.ATTRIBUTES);
} else {
APIManagerConfiguration configuration = getAPIManagerConfiguration();
applicationAttributes = configuration.getApplicationAttributes();
}
} catch (NullPointerException e){
handleException("Error in reading configuration " + e.getMessage(), e);
}
return applicationAttributes;
}
/**
* This method is used to validate the keys of the custom application attributes configured by the user
*
* @param application the application whose attributes should be validated
* @param userId user name of the logged in user
* @throws APIManagementException
*/
public void checkAppAttributes(Application application, String userId) throws APIManagementException {
JSONArray applicationAttributesFromConfig = getAppAttributesFromConfig(userId);
Map<String, String> applicationAttributes = application.getApplicationAttributes();
List attributeKeys = new ArrayList<String>();
int applicationId = application.getId();
int tenantId = 0;
Map<String, String> newApplicationAttributes = new HashMap<>();
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
for (Object object : applicationAttributesFromConfig) {
JSONObject attribute = (JSONObject) object;
attributeKeys.add(attribute.get(APIConstants.ApplicationAttributes.ATTRIBUTE));
}
for (Object key : applicationAttributes.keySet()) {
if (!attributeKeys.contains(key)) {
apiMgtDAO.deleteApplicationAttributes((String) key, applicationId);
if (log.isDebugEnabled()) {
log.debug("Removing " + key + "from application - " + application.getName());
}
}
}
for (Object key : attributeKeys) {
if (!applicationAttributes.keySet().contains(key)) {
newApplicationAttributes.put((String) key, "");
}
}
apiMgtDAO.addApplicationAttributes(newApplicationAttributes, applicationId, tenantId);
}
}
| components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIConsumerImpl.java | /*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.apimgt.api.APIConsumer;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException;
import org.wso2.carbon.apimgt.api.LoginPostExecutor;
import org.wso2.carbon.apimgt.api.NewPostLoginExecutor;
import org.wso2.carbon.apimgt.api.WorkflowResponse;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIKey;
import org.wso2.carbon.apimgt.api.model.APIRating;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.api.model.AccessTokenRequest;
import org.wso2.carbon.apimgt.api.model.Application;
import org.wso2.carbon.apimgt.api.model.ApplicationConstants;
import org.wso2.carbon.apimgt.api.model.ApplicationKeysDTO;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.OAuthAppRequest;
import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.Subscriber;
import org.wso2.carbon.apimgt.api.model.SubscriptionResponse;
import org.wso2.carbon.apimgt.api.model.Tag;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.TierPermission;
import org.wso2.carbon.apimgt.impl.caching.CacheInvalidator;
import org.wso2.carbon.apimgt.impl.dto.ApplicationRegistrationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.ApplicationWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.SubscriptionWorkflowDTO;
import org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO;
import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO;
import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APINameComparator;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator;
import org.wso2.carbon.apimgt.impl.utils.ApplicationUtils;
import org.wso2.carbon.apimgt.impl.workflow.AbstractApplicationRegistrationWorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.GeneralWorkflowResponse;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowException;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutor;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory;
import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.GenericArtifactManager;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact;
import org.wso2.carbon.governance.api.util.GovernanceUtils;
import org.wso2.carbon.registry.common.TermData;
import org.wso2.carbon.registry.core.ActionConstants;
import org.wso2.carbon.registry.core.Association;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.Resource;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.pagination.PaginationContext;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.user.api.AuthorizationManager;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.mgt.stub.UserAdminStub;
import org.wso2.carbon.user.mgt.stub.UserAdminUserAdminException;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.cache.Caching;
import javax.wsdl.Definition;
/**
* This class provides the core API store functionality. It is implemented in a very
* self-contained and 'pure' manner, without taking requirements like security into account,
* which are subject to frequent change. Due to this 'pure' nature and the significance of
* the class to the overall API management functionality, the visibility of the class has
* been reduced to package level. This means we can still use it for internal purposes and
possibly even extend it, but it is entirely off limits to external users. Users wishing to
* programmatically access this functionality should use one of the extensions of this
* class which is visible to them. These extensions may add additional features like
* security to this class.
*/
class APIConsumerImpl extends AbstractAPIManager implements APIConsumer {
private static final Log log = LogFactory.getLog(APIConsumerImpl.class);
public static final char COLON_CHAR = ':';
public static final String EMPTY_STRING = "";
public static final String ENVIRONMENT_NAME = "environmentName";
public static final String ENVIRONMENT_TYPE = "environmentType";
public static final String API_NAME = "apiName";
public static final String API_VERSION = "apiVersion";
public static final String API_PROVIDER = "apiProvider";
/* Map to Store APIs against Tag */
private ConcurrentMap<String, Set<API>> taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
private boolean isTenantModeStoreView;
private String requestedTenant;
private boolean isTagCacheEnabled;
private Set<Tag> tagSet;
private long tagCacheValidityTime;
private volatile long lastUpdatedTime;
private volatile long lastUpdatedTimeForTagApi;
private final Object tagCacheMutex = new Object();
private final Object tagWithAPICacheMutex = new Object();
protected APIMRegistryService apimRegistryService;
protected String userNameWithoutChange;
public APIConsumerImpl() throws APIManagementException {
super();
readTagCacheConfigs();
}
public APIConsumerImpl(String username, APIMRegistryService apimRegistryService) throws APIManagementException {
super(username);
userNameWithoutChange = username;
readTagCacheConfigs();
this.apimRegistryService = apimRegistryService;
}
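/**
* Reads the store tag cache configuration. The tag cache is enabled only when a cache duration has been configured.
*/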
private void readTagCacheConfigs() {
APIManagerConfiguration config = getAPIManagerConfiguration();
String enableTagCache = config.getFirstProperty(APIConstants.STORE_TAG_CACHE_DURATION);
if (enableTagCache == null) {
isTagCacheEnabled = false;
tagCacheValidityTime = 0;
} else {
isTagCacheEnabled = true;
tagCacheValidityTime = Long.parseLong(enableTagCache);
}
}
@Override
public Subscriber getSubscriber(String subscriberId) throws APIManagementException {
Subscriber subscriber = null;
try {
subscriber = apiMgtDAO.getSubscriber(subscriberId);
} catch (APIManagementException e) {
handleException("Failed to get Subscriber", e);
}
return subscriber;
}
/**
* Returns the set of APIs with the given tag from the taggedAPIs Map
*
* @param tagName The name of the tag
* @return Set of {@link API} with the given tag
* @throws APIManagementException
*/
@Override
public Set<API> getAPIsWithTag(String tagName, String requestedTenantDomain) throws APIManagementException {
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTimeForTagApi;
long currentTimeAtStart = System.currentTimeMillis();
if(isTagCacheEnabled && ( (currentTimeAtStart- lastUpdatedTimeAtStart) < tagCacheValidityTime)){
if (taggedAPIs != null && taggedAPIs.containsKey(tagName)) {
return taggedAPIs.get(tagName);
}
}else{
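// The tag cache is disabled or has expired; reset the tag-to-API map and record the refresh time.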
synchronized (tagWithAPICacheMutex) {
lastUpdatedTimeForTagApi = System.currentTimeMillis();
taggedAPIs = new ConcurrentHashMap<String, Set<API>>();
}
}
boolean isTenantMode = requestedTenantDomain != null && !"null".equalsIgnoreCase(requestedTenantDomain);
this.isTenantModeStoreView = isTenantMode;
if (requestedTenantDomain != null && !"null".equals(requestedTenantDomain)) {
this.requestedTenant = requestedTenantDomain;
}
Registry userRegistry;
boolean isTenantFlowStarted = false;
Set<API> apisWithTag = null;
try {
//start the tenant flow prior to loading registry
if (requestedTenant != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenant)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenantDomain);
}
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(requestedTenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
apisWithTag = getAPIsWithTag(userRegistry, tagName);
/* Add the APIs against the tag name */
if (!apisWithTag.isEmpty()) {
if (taggedAPIs.containsKey(tagName)) {
for (API api : apisWithTag) {
taggedAPIs.get(tagName).add(api);
}
} else {
taggedAPIs.putIfAbsent(tagName, apisWithTag);
}
}
} catch (RegistryException e) {
handleException("Failed to get api by the tag", e);
} catch (UserStoreException e) {
handleException("Failed to get api by the tag", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return apisWithTag;
}
protected void setUsernameToThreadLocalCarbonContext(String username) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(username);
}
protected UserRegistry getGovernanceUserRegistry(int tenantId) throws RegistryException {
return ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
}
protected int getTenantId(String requestedTenantDomain) throws UserStoreException {
return ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
}
/**
* Returns a paginated set of the APIs with the given tag.
*
* @param tag The name of the tag
* @param start The starting index of the return result set
* @param end The end index of the return result set
* @param tenantDomain The tenant domain to search in
* @return A {@link Map} of APIs(between the given indexes) and the total number indicating all the available
* APIs count
* @throws APIManagementException
*/
@Override
public Map<String, Object> getPaginatedAPIsWithTag(String tag, int start, int end, String tenantDomain) throws APIManagementException {
List<API> apiList = new ArrayList<API>();
Set<API> resultSet = new TreeSet<API>(new APIVersionComparator());
Map<String, Object> results = new HashMap<String, Object>();
Set<API> taggedAPISet = this.getAPIsWithTag(tag,tenantDomain);
if (taggedAPISet != null) {
if (taggedAPISet.size() < end) {
end = taggedAPISet.size();
}
int totalLength;
apiList.addAll(taggedAPISet);
totalLength = apiList.size();
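// 'end' is used as a page size here; convert it to an exclusive upper index relative to 'start',
// capped at the total number of tagged APIs.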
if (totalLength <= ((start + end) - 1)) {
end = totalLength;
} else {
end = start + end;
}
for (int i = start; i < end; i++) {
resultSet.add(apiList.get(i));
}
results.put("apis", resultSet);
results.put("length", taggedAPISet.size());
} else {
results.put("apis", null);
results.put("length", 0);
}
return results;
}
/**
* Returns the set of APIs with the given tag, retrieved from registry
*
* @param registry - Current registry; tenant/SuperTenant
* @param tag - The tag name
* @return A {@link Set} of {@link API} objects.
* @throws APIManagementException
*/
private Set<API> getAPIsWithTag(Registry registry, String tag)
throws APIManagementException {
Set<API> apiSet = new TreeSet<API>(new APINameComparator());
try {
List<GovernanceArtifact> genericArtifacts =
GovernanceUtils.findGovernanceArtifacts(getSearchQuery(APIConstants.TAG_SEARCH_TYPE_PREFIX2 + tag), registry,
APIConstants.API_RXT_MEDIA_TYPE);
for (GovernanceArtifact genericArtifact : genericArtifacts) {
try {
String apiStatus = APIUtil.getLcStateFromArtifact(genericArtifact);
if (genericArtifact != null && (APIConstants.PUBLISHED.equals(apiStatus)
|| APIConstants.PROTOTYPED.equals(apiStatus))) {
API api = APIUtil.getAPI(genericArtifact);
if (api != null) {
apiSet.add(api);
}
}
} catch (RegistryException e) {
log.warn("User is not authorized to get an API with tag " + tag, e);
}
}
} catch (RegistryException e) {
handleException("Failed to get API for tag " + tag, e);
}
return apiSet;
}
/**
* The method to get the published APIs shown in the store view
*
* @return Set<API> Set of APIs
* @throws APIManagementException
*/
@Override
public Set<API> getAllPublishedAPIs(String tenantDomain) throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
for (GenericArtifact artifact : genericArtifacts) {
// Check the lifecycle status of the artifact to decide whether the API should be listed.
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
try {
checkAccessControlPermission(api.getId());
} catch (APIManagementException e) {
// This is a second level of filter to get apis based on access control and visibility.
// Hence log is set as debug and continued.
if(log.isDebugEnabled()) {
log.debug("User is not authorized to view the api " + api.getId().getApiName(), e);
}
continue;
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs for store. User : " + PrivilegedCarbonContext
.getThreadLocalCarbonContext().getUsername();
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}
return apiSortedSet;
}
/**
* The method to get the paginated published APIs shown in the store view
*
* @return Map<String, Object> containing the set of APIs and the total result count
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String,Object> getAllPaginatedPublishedAPIs(String tenantDomain,int start,int end)
throws APIManagementException {
Boolean displayAPIsWithMultipleStatus = false;
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
}finally {
endTenantFlow();
}
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
} else{
return getAllPaginatedAPIs(tenantDomain, start, end);
}
Map<String, Object> result = new HashMap<String, Object>();
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
int totalLength = 0;
try {
Registry userRegistry;
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) ||
(isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting paginated published API.");
continue;
}
                    // Load the API from the artifact; the provider is used below to determine the latest version per API name.
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all Published APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
/**
* Regenerate consumer secret.
*
     * @param clientId consumer key of the application whose consumer secret should be regenerated
     * @return the new consumer secret
     * @throws APIManagementException if the consumer secret cannot be regenerated
*/
public String renewConsumerSecret(String clientId) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
return keyManager.getNewApplicationConsumerSecret(tokenRequest);
}
/**
* The method to get APIs in any of the given LC status array
*
* @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
@Override
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String[] apiStatus, boolean returnAPITags) throws APIManagementException {
        Map<String, Object> result = new HashMap<String, Object>();
        SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
        SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
        int totalLength = 0;
boolean isMore = false;
String criteria = APIConstants.LCSTATE_SEARCH_TYPE_KEY;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
                // Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
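                // e.g. start = 0 with a pagination value of 11 yields maxPaginationLimit = 12; the extra
                // artifact acts as a sentinel indicating that more results may exist beyond this page.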
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
criteria = criteria + APIUtil.getORBasedSearchCriteria(apiStatus);
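            // e.g. for apiStatus = {PUBLISHED, PROTOTYPED} this appends a lifecycle-state clause such as
            // "(PUBLISHED OR PROTOTYPED)"; the exact syntax depends on APIUtil.getORBasedSearchCriteria.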
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
if (apiStatus != null && apiStatus.length > 0) {
List<GovernanceArtifact> genericArtifacts = GovernanceUtils.findGovernanceArtifacts
(getSearchQuery(criteria), userRegistry, APIConstants.API_RXT_MEDIA_TYPE);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.size() == 0) {
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength = 0;
for (GovernanceArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
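                        // Ensure the APIs returned match the page length; an additional artifact may have been
                        // returned due to incrementing the pagination limit when querying the registry.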
tempLength++;
if (tempLength >= totalLength) {
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis", apiVersionsSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
     * The method to get APIs by the given status for the Store view
     *
     * @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
@Override
@Deprecated
public Map<String, Object> getAllPaginatedAPIsByStatus(String tenantDomain,
int start, int end, final String apiStatus, boolean returnAPITags) throws APIManagementException {
try {
if (tenantDomain != null) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
}finally {
endTenantFlow();
}
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (APIConstants.PROTOTYPED.equals(apiStatus)) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
if (!displayAPIsWithMultipleStatus) {
//Create the search attribute map
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(apiStatus);
}});
} else {
return getAllPaginatedAPIs(tenantDomain, start, end);
}
}
        Map<String, Object> result = new HashMap<String, Object>();
        SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
        SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
        int totalLength = 0;
boolean isMore = false;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
                // Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength=PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist so we cannot determine the total API count without incurring a
// performance hit
--totalLength; // Remove the additional 1 we added earlier when setting max pagination limit
}
int tempLength=0;
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs by status.");
continue;
}
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//log and continue since we want to load the rest of the APIs.
log.error("Error while loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME),
e);
}
if (api != null) {
if (returnAPITags) {
String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
Set<String> tags = new HashSet<String>();
org.wso2.carbon.registry.core.Tag[] tag = registry.getTags(artifactPath);
for (org.wso2.carbon.registry.core.Tag tag1 : tag) {
tags.add(tag1.getTagName());
}
api.addTags(tags);
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
tempLength++;
if (tempLength >= totalLength){
break;
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
result.put("isMore", isMore);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving APIs by status.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
result.put("isMore", isMore);
return result;
}
/**
* Re-generates the access token.
* @param oldAccessToken Token to be revoked
* @param clientId Consumer Key for the Application
* @param clientSecret Consumer Secret for the Application
* @param validityTime Desired Validity time for the token
* @param jsonInput Additional parameters if Authorization server needs any.
* @return Renewed Access Token.
* @throws APIManagementException
*/
@Override
    public AccessTokenInfo renewAccessToken(String oldAccessToken, String clientId, String clientSecret,
                                            String validityTime, String[] requestedScopes,
                                            String jsonInput) throws APIManagementException {
// Create Token Request with parameters provided from UI.
AccessTokenRequest tokenRequest = new AccessTokenRequest();
tokenRequest.setClientId(clientId);
tokenRequest.setClientSecret(clientSecret);
tokenRequest.setValidityPeriod(Long.parseLong(validityTime));
tokenRequest.setTokenToRevoke(oldAccessToken);
tokenRequest.setScope(requestedScopes);
try {
// Populating additional parameters.
tokenRequest = ApplicationUtils.populateTokenRequest(jsonInput, tokenRequest);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
JSONObject appLogObject = new JSONObject();
appLogObject.put("Re-Generated Keys for application with client Id", clientId);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyManager.getNewApplicationAccessToken(tokenRequest);
} catch (APIManagementException e) {
log.error("Error while re-generating AccessToken", e);
throw e;
}
}
/**
     * The method to get all PUBLISHED and DEPRECATED APIs for the Store view
     *
     * @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
@Deprecated
    public Map<String, Object> getAllPaginatedAPIs(String tenantDomain, int start, int end) throws APIManagementException {
        Map<String, Object> result = new HashMap<String, Object>();
        SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
        SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
        int totalLength = 0;
try {
Registry userRegistry;
boolean isTenantMode=(tenantDomain != null);
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant store anonymous mode
int tenantId = getTenantId(tenantDomain);
userRegistry = getGovernanceUserRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
}
this.isTenantModeStoreView = isTenantMode;
this.requestedTenant = tenantDomain;
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
boolean noPublishedAPIs = false;
if (artifactManager != null) {
//Create the search attribute map for PUBLISHED APIs
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
if (genericArtifacts == null || genericArtifacts.length == 0) {
noPublishedAPIs = true;
}
int publishedAPICount;
if (genericArtifacts != null) {
for (GenericArtifact artifact : genericArtifacts) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting all paginated APIs.");
continue;
}
                        // Load the API from the artifact; the provider is used below to determine the latest version per API name.
// String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
// key = api.getId().getProviderName() + ":" + api.getId().getApiName() + ":" + api.getId()
// .getVersion();
multiVersionedAPIs.add(api);
}
}
}
}
if (!displayMultipleVersions) {
publishedAPICount = latestPublishedAPIs.size();
} else {
publishedAPICount = multiVersionedAPIs.size();
}
if ((start + end) > publishedAPICount) {
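                    // The requested page extends beyond the PUBLISHED APIs, so the remainder of the page
                    // is filled with DEPRECATED APIs using an adjusted pagination window.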
if (publishedAPICount > 0) {
/*Starting to retrieve DEPRECATED APIs*/
start = 0;
/* publishedAPICount is always less than end*/
end = end - publishedAPICount;
} else {
start = start - totalLength;
}
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, Integer.MAX_VALUE);
//Create the search attribute map for DEPRECATED APIs
Map<String, List<String>> listMapForDeprecatedAPIs = new HashMap<String, List<String>>();
listMapForDeprecatedAPIs.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.DEPRECATED);
}});
GenericArtifact[] genericArtifactsForDeprecatedAPIs = artifactManager.findGenericArtifacts(listMapForDeprecatedAPIs);
totalLength = totalLength + PaginationContext.getInstance().getLength();
if ((genericArtifactsForDeprecatedAPIs == null || genericArtifactsForDeprecatedAPIs.length == 0) && noPublishedAPIs) {
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
}
if (genericArtifactsForDeprecatedAPIs != null) {
for (GenericArtifact artifact : genericArtifactsForDeprecatedAPIs) {
if (artifact == null) {
log.error("Failed to retrieve artifact when getting deprecated APIs.");
continue;
}
                            // Load the API from the artifact; the provider is used below to determine the latest version per API name.
API api = APIUtil.getAPI(artifact);
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
}
}
}
if (!displayMultipleVersions) {
for (API api : latestPublishedAPIs.values()) {
apiSortedSet.add(api);
}
result.put("apis",apiSortedSet);
result.put("totalLength",totalLength);
return result;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
result.put("apis",apiVersionsSortedSet);
result.put("totalLength",totalLength);
return result;
}
} else {
String errorMessage = "Artifact manager is null for tenant domain " + tenantDomain
+ " when retrieving all paginated APIs.";
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
}finally {
PaginationContext.destroy();
}
result.put("apis", apiSortedSet);
result.put("totalLength", totalLength);
return result;
}
@Override
public Set<API> getTopRatedAPIs(int limit) throws APIManagementException {
int returnLimit = 0;
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
try {
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage = "Artifact manager is null when retrieving top rated APIs.";
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
if (genericArtifacts == null || genericArtifacts.length == 0) {
return apiSortedSet;
}
for (GenericArtifact genericArtifact : genericArtifacts) {
String status = APIUtil.getLcStateFromArtifact(genericArtifact);
if (APIConstants.PUBLISHED.equals(status)) {
String artifactPath = genericArtifact.getPath();
float rating = registry.getAverageRating(artifactPath);
if (rating > APIConstants.TOP_TATE_MARGIN && (returnLimit < limit)) {
returnLimit++;
API api = APIUtil.getAPI(genericArtifact, registry);
if (api != null) {
apiSortedSet.add(api);
}
}
}
}
} catch (RegistryException e) {
handleException("Failed to get top rated API", e);
}
return apiSortedSet;
}
/**
* Get the recently added APIs set
*
     * @param limit if no limit is specified, everything is returned; otherwise the return list is limited to the specified value
* @return Set<API>
* @throws APIManagementException
*/
@Override
public Set<API> getRecentlyAddedAPIs(int limit, String tenantDomain)
throws APIManagementException {
SortedSet<API> recentlyAddedAPIs = new TreeSet<API>(new APINameComparator());
SortedSet<API> recentlyAddedAPIsWithMultipleVersions = new TreeSet<API>(new APIVersionComparator());
Registry userRegistry;
APIManagerConfiguration config = getAPIManagerConfiguration();
boolean isRecentlyAddedAPICacheEnabled =
Boolean.parseBoolean(config.getFirstProperty(APIConstants.API_STORE_RECENTLY_ADDED_API_CACHE_ENABLE));
PrivilegedCarbonContext.startTenantFlow();
        boolean isTenantFlowStarted;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
isTenantFlowStarted = true;
} else {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
isTenantFlowStarted = true;
}
try {
boolean isTenantMode = (tenantDomain != null);
if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(tenantDomain);
// explicitly load the tenant's registry
APIUtil.loadTenantRegistry(tenantId);
setUsernameToThreadLocalCarbonContext(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME);
isTenantFlowStarted = true;
userRegistry = getGovernanceUserRegistry(tenantId);
} else {
userRegistry = registry;
setUsernameToThreadLocalCarbonContext(this.username);
isTenantFlowStarted = true;
}
if (isRecentlyAddedAPICacheEnabled) {
boolean isStatusChanged = false;
Set<API> recentlyAddedAPI = (Set<API>) Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).get(username + COLON_CHAR + tenantDomain);
if (recentlyAddedAPI != null) {
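                    // Invalidate the cached result if any cached API is no longer in the PUBLISHED state.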
for (API api : recentlyAddedAPI) {
try {
if (!APIConstants.PUBLISHED.equalsIgnoreCase(userRegistry.get(APIUtil.getAPIPath(api.getId())).getProperty(APIConstants.API_STATUS))) {
isStatusChanged = true;
break;
}
} catch (Exception ex) {
log.error("Error while checking API status for APP " + api.getId().getApiName() + '-' +
api.getId().getVersion(), ex);
}
}
if (!isStatusChanged) {
return recentlyAddedAPI;
}
}
}
PaginationContext.init(0, limit, APIConstants.REGISTRY_ARTIFACT_SEARCH_DESC_ORDER,
APIConstants.CREATED_DATE, Integer.MAX_VALUE);
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
listMap.put(APIConstants.STORE_VIEW_ROLES, getUserRoleList());
String searchCriteria = APIConstants.LCSTATE_SEARCH_KEY + "= (" + APIConstants.PUBLISHED + ")";
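            // Restricts the governance search to PUBLISHED APIs via a lifecycle-state query (e.g. "lcState= (PUBLISHED)").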
//Find UUID
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY);
if (artifactManager != null) {
GenericArtifact[] genericArtifacts = artifactManager.findGovernanceArtifacts(getSearchQuery(searchCriteria));
SortedSet<API> allAPIs = new TreeSet<API>(new APINameComparator());
for (GenericArtifact artifact : genericArtifacts) {
API api = null;
try {
api = APIUtil.getAPI(artifact);
} catch (APIManagementException e) {
//just log and continue since we want to go through the other APIs as well.
log.error("Error loading API " + artifact.getAttribute(APIConstants.API_OVERVIEW_NAME), e);
}
if (api != null) {
allAPIs.add(api);
}
}
if (!APIUtil.isAllowDisplayMultipleVersions()) {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
Comparator<API> versionComparator = new APIVersionComparator();
String key;
for (API api : allAPIs) {
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same
// name, make sure this one has a higher version
// number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
}
recentlyAddedAPIs.addAll(latestPublishedAPIs.values());
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIs;
} else {
recentlyAddedAPIsWithMultipleVersions.addAll(allAPIs);
if (isRecentlyAddedAPICacheEnabled) {
Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
.getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME)
.put(username + COLON_CHAR + tenantDomain, allAPIs);
}
return recentlyAddedAPIsWithMultipleVersions;
}
} else {
String errorMessage = "Artifact manager is null when retrieving recently added APIs for tenant domain "
+ tenantDomain;
log.error(errorMessage);
}
} catch (RegistryException e) {
handleException("Failed to get all published APIs", e);
} catch (UserStoreException e) {
handleException("Failed to get all published APIs", e);
} finally {
PaginationContext.destroy();
if (isTenantFlowStarted) {
endTenantFlow();
}
}
return recentlyAddedAPIs;
}
@Override
public Set<Tag> getAllTags(String requestedTenantDomain) throws APIManagementException {
this.isTenantModeStoreView = (requestedTenantDomain != null);
if(requestedTenantDomain != null){
this.requestedTenant = requestedTenantDomain;
}
/* We keep track of the lastUpdatedTime of the TagCache to determine its freshness.
*/
long lastUpdatedTimeAtStart = lastUpdatedTime;
long currentTimeAtStart = System.currentTimeMillis();
        if (isTagCacheEnabled && ((currentTimeAtStart - lastUpdatedTimeAtStart) < tagCacheValidityTime)) {
if(tagSet != null){
return tagSet;
}
}
TreeSet<Tag> tempTagSet = new TreeSet<Tag>(new Comparator<Tag>() {
@Override
public int compare(Tag o1, Tag o2) {
return o1.getName().compareTo(o2.getName());
}
});
Registry userRegistry = null;
boolean isTenantFlowStarted = false;
String tagsQueryPath = null;
try {
tagsQueryPath = RegistryConstants.QUERIES_COLLECTION_PATH + "/tag-summary";
Map<String, String> params = new HashMap<String, String>();
params.put(RegistryConstants.RESULT_TYPE_PROPERTY_NAME, RegistryConstants.TAG_SUMMARY_RESULT_TYPE);
//as a tenant, I'm browsing my own Store or I'm browsing a Store of another tenant..
if ((this.isTenantModeStoreView && this.tenantDomain==null) || (this.isTenantModeStoreView && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant based store anonymous mode
int tenantId = getTenantId(this.requestedTenant);
userRegistry = ServiceReferenceHolder.getInstance().getRegistryService().
getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId);
} else {
userRegistry = registry;
}
Map<String, Tag> tagsData = new HashMap<String, Tag>();
try {
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
if (requestedTenant != null ) {
isTenantFlowStarted = startTenantFlowForTenantDomain(requestedTenant);
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(((UserRegistry)userRegistry).getUserName());
}
Map <String, List<String>> criteriaPublished = new HashMap<String, List<String>>();
criteriaPublished.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
//rxt api media type
List<TermData> termsPublished = GovernanceUtils
.getTermDataList(criteriaPublished, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPublished != null){
for(TermData data : termsPublished){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
Map<String, List<String>> criteriaPrototyped = new HashMap<String, List<String>>();
criteriaPrototyped.put(APIConstants.LCSTATE_SEARCH_KEY, new ArrayList<String>() {{
add(APIConstants.PROTOTYPED);
}});
//rxt api media type
List<TermData> termsPrototyped = GovernanceUtils
.getTermDataList(criteriaPrototyped, APIConstants.API_OVERVIEW_TAG,
APIConstants.API_RXT_MEDIA_TYPE, true);
if(termsPrototyped != null){
for(TermData data : termsPrototyped){
tempTagSet.add(new Tag(data.getTerm(), (int)data.getFrequency()));
}
}
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
synchronized (tagCacheMutex) {
lastUpdatedTime = System.currentTimeMillis();
this.tagSet = tempTagSet;
}
} catch (RegistryException e) {
try {
                //Before a tenant logs in to the store or publisher at least once,
                //a registry exception is thrown when the tenant store is accessed in anonymous mode.
                //This fix checks whether the query resource is available in the registry; if not,
                //only a warning is logged.
if (userRegistry != null && !userRegistry.resourceExists(tagsQueryPath)) {
log.warn("Failed to retrieve tags query resource at " + tagsQueryPath);
return tagSet == null ? Collections.EMPTY_SET : tagSet;
}
} catch (RegistryException e1) {
                // Although this exception can be ignored, it is logged as a warning because it occurs
                // while adding supplementary logging in an error scenario and does not affect the
                // execution path.
log.warn("Unable to execute the resource exist method for tags query resource path : " + tagsQueryPath,
e1);
}
handleException("Failed to get all the tags", e);
} catch (UserStoreException e) {
handleException("Failed to get all the tags", e);
}
return tagSet;
}
@Override
public Set<Tag> getTagsWithAttributes(String tenantDomain) throws APIManagementException {
// Fetch the all the tags first.
Set<Tag> tags = getAllTags(tenantDomain);
// For each and every tag get additional attributes from the registry.
String descriptionPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/description.txt";
String thumbnailPathPattern = APIConstants.TAGS_INFO_ROOT_LOCATION + "/%s/thumbnail.png";
//if the tenantDomain is not specified super tenant domain is used
if (StringUtils.isBlank(tenantDomain)) {
try {
tenantDomain = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getSuperTenantDomain();
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Cannot get super tenant domain name", e);
}
}
//get the registry instance related to the tenant domain
UserRegistry govRegistry = null;
try {
int tenantId = getTenantId(tenantDomain);
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
govRegistry = registryService.getGovernanceSystemRegistry(tenantId);
} catch (UserStoreException e) {
handleException("Cannot get tenant id for tenant domain name:" + tenantDomain, e);
} catch (RegistryException e) {
handleException("Cannot get registry for tenant domain name:" + tenantDomain, e);
}
if (govRegistry != null) {
for (Tag tag : tags) {
// Get the description.
Resource descriptionResource = null;
String descriptionPath = String.format(descriptionPathPattern, tag.getName());
try {
if (govRegistry.resourceExists(descriptionPath)) {
descriptionResource = govRegistry.get(descriptionPath);
}
} catch (RegistryException e) {
//warn and proceed to the next tag
log.warn(String.format("Error while querying the existence of the description for the tag '%s'",
tag.getName()), e);
}
                // The resource is assumed to be a byte array since it is the content
                // of a text file.
if (descriptionResource != null) {
try {
String description = new String((byte[]) descriptionResource.getContent(),
Charset.defaultCharset());
tag.setDescription(description);
} catch (ClassCastException e) {
//added warnings as it can then proceed to load rest of resources/tags
log.warn(String.format("Cannot cast content of %s to byte[]", descriptionPath), e);
} catch (RegistryException e) {
//added warnings as it can then proceed to load rest of resources/tags
log.warn(String.format("Cannot read content of %s", descriptionPath), e);
}
}
// Checks whether the thumbnail exists.
String thumbnailPath = String.format(thumbnailPathPattern, tag.getName());
try {
boolean isThumbnailExists = govRegistry.resourceExists(thumbnailPath);
tag.setThumbnailExists(isThumbnailExists);
if (isThumbnailExists) {
tag.setThumbnailUrl(APIUtil.getRegistryResourcePathForUI(
APIConstants.RegistryResourceTypesForUI.TAG_THUMBNAIL, tenantDomain, thumbnailPath));
}
} catch (RegistryException e) {
//warn and then proceed to load rest of tags
log.warn(String.format("Error while querying the existence of %s", thumbnailPath), e);
}
}
}
return tags;
}
@Override
public void rateAPI(APIIdentifier apiId, APIRating rating,
String user) throws APIManagementException {
apiMgtDAO.addRating(apiId, rating.getRating(), user);
}
@Override
public void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException {
apiMgtDAO.removeAPIRating(apiId, user);
}
@Override
public int getUserRating(APIIdentifier apiId, String user) throws APIManagementException {
return apiMgtDAO.getUserRating(apiId, user);
}
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, int limit)
throws APIManagementException {
SortedSet<API> apiSortedSet = new TreeSet<API>(new APINameComparator());
SortedSet<API> apiVersionsSortedSet = new TreeSet<API>(new APIVersionComparator());
try {
Map<String, API> latestPublishedAPIs = new HashMap<String, API>();
List<API> multiVersionedAPIs = new ArrayList<API>();
Comparator<API> versionComparator = new APIVersionComparator();
Boolean displayMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
if (associations.length < limit || limit == -1) {
limit = associations.length;
}
for (int i = 0; i < limit; i++) {
Association association = associations[i];
String apiPath = association.getDestinationPath();
Resource resource = registry.get(apiPath);
String apiArtifactId = resource.getUUID();
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!displayAPIsWithMultipleStatus) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!displayMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = latestPublishedAPIs.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
latestPublishedAPIs.put(key, api);
}
} else {
// We haven't seen this API before
latestPublishedAPIs.put(key, api);
}
} else { //If allow showing multiple versions of an API
multiVersionedAPIs.add(api);
}
}
} else {
throw new GovernanceException("artifact id is null of " + apiPath);
}
}
if (!displayMultipleVersions) {
apiSortedSet.addAll(latestPublishedAPIs.values());
return apiSortedSet;
} else {
apiVersionsSortedSet.addAll(multiVersionedAPIs);
return apiVersionsSortedSet;
}
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
}
return null;
}
@Override
public Set<API> getPublishedAPIsByProvider(String providerId, String loggedUsername, int limit, String apiOwner,
String apiBizOwner) throws APIManagementException {
try {
Boolean allowMultipleVersions = APIUtil.isAllowDisplayMultipleVersions();
Boolean showAllAPIs = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
int tenantId = getTenantId(providerDomain);
final Registry registry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceSystemRegistry(tenantId);
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
APIConstants.API_KEY);
if (artifactManager == null) {
String errorMessage =
"Artifact manager is null when retrieving all published APIs by provider ID " + providerId;
log.error(errorMessage);
throw new APIManagementException(errorMessage);
}
int publishedAPICount = 0;
Map<String, API> apiCollection = new HashMap<String, API>();
if(apiBizOwner != null && !apiBizOwner.isEmpty()){
try {
final String bizOwner = apiBizOwner;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_BUSS_OWNER, new ArrayList<String>() {{
add(bizOwner);
}});
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
if(genericArtifacts != null && genericArtifacts.length > 0){
for(GenericArtifact artifact : genericArtifacts){
if (publishedAPICount >= limit) {
break;
}
if(isCandidateAPI(artifact.getPath(), loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
} catch (GovernanceException e) {
log.error("Error while finding APIs by business owner " + apiBizOwner, e);
return null;
}
            } else {
String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
for (Association association : associations) {
if (publishedAPICount >= limit) {
break;
}
String apiPath = association.getDestinationPath();
if(isCandidateAPI(apiPath, loggedUsername, artifactManager, tenantId, showAllAPIs,
allowMultipleVersions, apiOwner, providerId, registry, apiCollection)){
publishedAPICount += 1;
}
}
}
return new HashSet<API>(apiCollection.values());
} catch (RegistryException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (org.wso2.carbon.user.core.UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
} catch (UserStoreException e) {
handleException("Failed to get Published APIs for provider : " + providerId, e);
return null;
}
}
private boolean isCandidateAPI(String apiPath, String loggedUsername, GenericArtifactManager artifactManager,
int tenantId, boolean showAllAPIs, boolean allowMultipleVersions,
String apiOwner, String providerId, Registry registry, Map<String, API> apiCollection)
throws UserStoreException, RegistryException, APIManagementException {
AuthorizationManager manager = ServiceReferenceHolder.getInstance().getRealmService().
getTenantUserRealm(tenantId).getAuthorizationManager();
Comparator<API> versionComparator = new APIVersionComparator();
Resource resource;
String path = RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(),
APIUtil.getMountedPath(RegistryContext.getBaseInstance(),
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) +
apiPath);
boolean checkAuthorized;
String userNameWithoutDomain = loggedUsername;
if (!loggedUsername.isEmpty() && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(super.tenantDomain)) {
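            // Strip the tenant domain suffix from the username, e.g. "alice@wso2.com" becomes "alice".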
String[] nameParts = loggedUsername.split("@");
userNameWithoutDomain = nameParts[0];
}
int loggedInUserTenantDomain = -1;
if(!StringUtils.isEmpty(loggedUsername)) {
loggedInUserTenantDomain = APIUtil.getTenantId(loggedUsername);
}
if (loggedUsername.isEmpty()) {
// Anonymous user is viewing.
checkAuthorized = manager.isRoleAuthorized(APIConstants.ANONYMOUS_ROLE, path, ActionConstants.GET);
} else if (tenantId != loggedInUserTenantDomain) {
//Cross tenant scenario
providerId = APIUtil.replaceEmailDomainBack(providerId);
String[] nameParts = providerId.split("@");
            String providerNameWithoutDomain = nameParts[0];
            checkAuthorized = manager.isUserAuthorized(providerNameWithoutDomain, path, ActionConstants.GET);
} else {
// Some user is logged in also user and api provider tenant domain are same.
checkAuthorized = manager.isUserAuthorized(userNameWithoutDomain, path, ActionConstants.GET);
}
String apiArtifactId = null;
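        // Only read the API artifact if the current user (or the anonymous role) is authorized to view it.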
if (checkAuthorized) {
resource = registry.get(apiPath);
apiArtifactId = resource.getUUID();
}
if (apiArtifactId != null) {
GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
// check the API status
String status = APIUtil.getLcStateFromArtifact(artifact);
API api = null;
//Check the api-manager.xml config file entry <DisplayAllAPIs> value is false
if (!showAllAPIs) {
// then we are only interested in published APIs here...
if (APIConstants.PUBLISHED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
} else { // else we are interested in both deprecated/published APIs here...
if (APIConstants.PUBLISHED.equals(status) || APIConstants.DEPRECATED.equals(status)) {
api = APIUtil.getAPI(artifact);
}
}
if (api != null) {
String apiVisibility = api.getVisibility();
if(!StringUtils.isEmpty(apiVisibility) && !APIConstants.API_GLOBAL_VISIBILITY.equalsIgnoreCase(apiVisibility)) {
String providerDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerId));
String loginUserDomain = MultitenantUtils.getTenantDomain(loggedUsername);
if(!StringUtils.isEmpty(providerDomain) && !StringUtils.isEmpty(loginUserDomain)
&& !providerDomain.equals(loginUserDomain)){
return false;
}
}
                // apiOwner is the value coming from the front end and is compared against the API instance
if (apiOwner != null && !apiOwner.isEmpty()) {
if (APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
api.getApiOwner() != null && !api.getApiOwner().isEmpty() &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject remote APIs when local admin user's API selected
} else if (!APIUtil.replaceEmailDomainBack(providerId).equals(APIUtil.replaceEmailDomainBack(apiOwner)) &&
!APIUtil.replaceEmailDomainBack(apiOwner)
.equals(APIUtil.replaceEmailDomainBack(api.getApiOwner()))) {
return false; // reject local admin's APIs when remote API selected
}
}
String key;
//Check the configuration to allow showing multiple versions of an API true/false
if (!allowMultipleVersions) { //If allow only showing the latest version of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName();
API existingAPI = apiCollection.get(key);
if (existingAPI != null) {
// If we have already seen an API with the same name, make sure
// this one has a higher version number
if (versionComparator.compare(api, existingAPI) > 0) {
apiCollection.put(key, api);
return true;
}
} else {
// We haven't seen this API before
apiCollection.put(key, api);
return true;
}
} else { //If allow showing multiple versions of an API
key = api.getId().getProviderName() + COLON_CHAR + api.getId().getApiName() + COLON_CHAR + api.getId()
.getVersion();
//we're not really interested in the key, so generate one for the sake of adding this element to
//the map.
key = key + '_' + apiCollection.size();
apiCollection.put(key, api);
return true;
}
}
}
return false;
}
@Override
    public Map<String, Object> searchPaginatedAPIs(String searchTerm, String searchType, String requestedTenantDomain, int start, int end, boolean isLazyLoad)
throws APIManagementException {
Map<String,Object> result = new HashMap<String,Object>();
boolean isTenantFlowStarted = false;
try {
boolean isTenantMode=(requestedTenantDomain != null);
if (isTenantMode && !org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(requestedTenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
} else {
requestedTenantDomain = org.wso2.carbon.base.MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(requestedTenantDomain, true);
}
Registry userRegistry;
int tenantIDLocal = 0;
String userNameLocal = this.username;
if ((isTenantMode && this.tenantDomain==null) || (isTenantMode && isTenantDomainNotMatching(requestedTenantDomain))) {//Tenant store anonymous mode
tenantIDLocal = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(requestedTenantDomain);
userRegistry = ServiceReferenceHolder.getInstance().
getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantIDLocal);
userNameLocal = CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME;
} else {
userRegistry = this.registry;
tenantIDLocal = tenantId;
}
PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userNameLocal);
if (APIConstants.DOCUMENTATION_SEARCH_TYPE_PREFIX.equalsIgnoreCase(searchType)) {
Map<Documentation, API> apiDocMap =
APIUtil.searchAPIsByDoc(userRegistry, tenantIDLocal, userNameLocal, searchTerm,
APIConstants.STORE_CLIENT);
result.put("apis", apiDocMap);
/*Pagination for Document search results is not supported yet, hence length is sent as end-start*/
if (apiDocMap.isEmpty()) {
result.put("length", 0);
} else {
result.put("length", end-start);
}
            } else if ("subcontext".equalsIgnoreCase(searchType)) {
                result = APIUtil.searchAPIsByURLPattern(userRegistry, searchTerm, start, end);
            } else {
                result = searchPaginatedAPIs(userRegistry, searchTerm, searchType, start, end, isLazyLoad);
            }
} catch (Exception e) {
handleException("Failed to Search APIs", e);
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return result;
}
/**
* Pagination API search based on solr indexing
*
     * @param registry   registry to perform the search against
     * @param searchTerm search term
     * @param searchType search type
     * @return Map<String, Object> API result set with pagination information
* @throws APIManagementException
*/
    public Map<String, Object> searchPaginatedAPIs(Registry registry, String searchTerm, String searchType, int start, int end, boolean limitAttributes) throws APIManagementException {
SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator());
List<API> apiList = new ArrayList<API>();
searchTerm = searchTerm.trim();
        Map<String, Object> result = new HashMap<String, Object>();
        int totalLength = 0;
        boolean isMore = false;
        String criteria = APIConstants.API_OVERVIEW_NAME;
try {
String paginationLimit = getAPIManagerConfiguration()
.getFirstProperty(APIConstants.API_STORE_APIS_PER_PAGE);
// If the Config exists use it to set the pagination limit
final int maxPaginationLimit;
if (paginationLimit != null) {
// The additional 1 added to the maxPaginationLimit is to help us determine if more
// APIs may exist so that we know that we are unable to determine the actual total
// API count. We will subtract this 1 later on so that it does not interfere with
// the logic of the rest of the application
int pagination = Integer.parseInt(paginationLimit);
                // Because the store jaggery pagination logic is 10 results per page we need to set the pagination
// limit to at least 11 or the pagination done at this level will conflict with the store pagination
// leading to some of the APIs not being displayed
if (pagination < 11) {
pagination = 11;
log.warn("Value of '" + APIConstants.API_STORE_APIS_PER_PAGE + "' is too low, defaulting to 11");
}
maxPaginationLimit = start + pagination + 1;
}
// Else if the config is not specified we go with default functionality and load all
else {
maxPaginationLimit = Integer.MAX_VALUE;
}
GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
PaginationContext.init(start, end, "ASC", APIConstants.API_OVERVIEW_NAME, maxPaginationLimit);
if (artifactManager != null) {
if (APIConstants.API_PROVIDER.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_PROVIDER;
searchTerm = searchTerm.replaceAll("@", "-AT-");
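                    // Provider names are stored in the registry with '@' replaced by '-AT-', so the search
                    // term is normalised to match the stored form.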
} else if (APIConstants.API_VERSION_LABEL.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_VERSION;
} else if (APIConstants.API_CONTEXT.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_CONTEXT;
} else if (APIConstants.API_DESCRIPTION.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_DESCRIPTION;
} else if (APIConstants.API_TAG.equalsIgnoreCase(searchType)) {
criteria = APIConstants.API_OVERVIEW_TAG;
}
//Create the search attribute map for PUBLISHED APIs
final String searchValue = searchTerm;
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(criteria, new ArrayList<String>() {{
add(searchValue);
}});
boolean displayAPIsWithMultipleStatus = APIUtil.isAllowDisplayAPIsWithMultipleStatus();
                //Only take PUBLISHED APIs from the search when there is no need to return APIs with multiple
                //statuses, because pagination breaks if an additional filtering by API status is done afterwards.
if (!displayAPIsWithMultipleStatus) {
listMap.put(APIConstants.API_OVERVIEW_STATUS, new ArrayList<String>() {{
add(APIConstants.PUBLISHED);
}});
}
GenericArtifact[] genericArtifacts = artifactManager.findGenericArtifacts(listMap);
totalLength = PaginationContext.getInstance().getLength();
boolean isFound = true;
if (genericArtifacts == null || genericArtifacts.length == 0) {
if (APIConstants.API_OVERVIEW_PROVIDER.equals(criteria)) {
genericArtifacts = searchAPIsByOwner(artifactManager, searchValue);
if (genericArtifacts == null || genericArtifacts.length == 0) {
isFound = false;
}
                    } else {
isFound = false;
}
}
if (!isFound) {
result.put("apis", apiSet);
result.put("length", 0);
result.put("isMore", isMore);
return result;
}
// Check to see if we can speculate that there are more APIs to be loaded
if (maxPaginationLimit == totalLength) {
isMore = true; // More APIs exist, cannot determine total API count without incurring perf hit
--totalLength; // Remove the additional 1 added earlier when setting max pagination limit
}
int tempLength =0;
for (GenericArtifact artifact : genericArtifacts) {
String status = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
if (APIUtil.isAllowDisplayAPIsWithMultipleStatus()) {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)
|| APIConstants.DEPRECATED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
} else {
if (APIConstants.PROTOTYPED.equals(status) || APIConstants.PUBLISHED.equals(status)) {
API resultAPI;
if (limitAttributes) {
resultAPI = APIUtil.getAPI(artifact);
} else {
resultAPI = APIUtil.getAPI(artifact, registry);
}
if (resultAPI != null) {
apiList.add(resultAPI);
}
}
}
// Ensure the number of APIs returned matches the length; there could be an additional API
// returned due to incrementing the pagination limit when fetching from the registry
tempLength++;
if (tempLength >= totalLength){
break;
}
}
apiSet.addAll(apiList);
}
} catch (RegistryException e) {
handleException("Failed to search APIs with type", e);
}
result.put("apis",apiSet);
result.put("length",totalLength);
result.put("isMore", isMore);
return result;
}
private GenericArtifact[] searchAPIsByOwner(GenericArtifactManager artifactManager, final String searchValue) throws GovernanceException {
Map<String, List<String>> listMap = new HashMap<String, List<String>>();
listMap.put(APIConstants.API_OVERVIEW_OWNER, new ArrayList<String>() {
{
add(searchValue);
}
});
return artifactManager.findGenericArtifacts(listMap);
}
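/*
* Illustrative sketch of the provider-search fallback above (assumed values): a search such as
* "provider:admin@carbon.super" is first run against the overview_provider field with "@"
* rewritten to "-AT-"; if no artifacts are found, the same value is retried against the
* overview_owner field, e.g.
*
*   GenericArtifact[] byOwner = searchAPIsByOwner(artifactManager, "admin-AT-carbon.super");
*/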
/**
* This method deletes the application key mapping and application registration entries.
*
* @param applicationName application name
* @param tokenType token type
* @param groupId group id
* @param userName user name
* @throws APIManagementException
*/
@Override
public void cleanUpApplicationRegistration(String applicationName, String tokenType, String groupId,
String userName) throws APIManagementException {
Application application = apiMgtDAO.getApplicationByName(applicationName, userName, groupId);
String applicationId = String.valueOf(application.getId());
apiMgtDAO.deleteApplicationRegistration(applicationId, tokenType);
apiMgtDAO.deleteApplicationKeyMappingByApplicationIdAndType(applicationId, tokenType);
String consumerKey = apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(applicationId, tokenType);
}
/**
* Maps an existing OAuth client to an APIM application.
*
* @param jsonString this string will contain the OAuth app details
* @param userName user name of the logged in user.
* @param clientId this is the consumer key of the OAuth application
* @param applicationName this is the APIM application name.
* @param keyType key type (PRODUCTION | SANDBOX)
* @param tokenType this is the Application Token Type. This can be either DEFAULT or JWT.
* @return a map containing the consumer key, consumer secret and access token details
* @throws APIManagementException
*/
@Override
public Map<String, Object> mapExistingOAuthClient(String jsonString, String userName, String clientId,
String applicationName, String keyType, String tokenType)
throws APIManagementException {
String callBackURL = null;
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, clientId, callBackURL,
"default",
jsonString, tokenType);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
// Checking if clientId is mapped with another application.
if (apiMgtDAO.isMappingExistsforConsumerKey(clientId)) {
String message = "Consumer Key " + clientId + " is used for another Application.";
log.error(message);
throw new APIManagementException(message);
}
log.debug("Client ID not mapped previously with another application.");
//createApplication on oAuthorization server.
OAuthApplicationInfo oAuthApplication = keyManager.mapOAuthApplication(oauthAppRequest);
//Do application mapping with consumerKey.
apiMgtDAO.createApplicationKeyTypeMappingForManualClients(keyType, applicationName, userName, clientId);
AccessTokenInfo tokenInfo;
if (oAuthApplication.getJsonString().contains(APIConstants.GRANT_TYPE_CLIENT_CREDENTIALS)) {
AccessTokenRequest tokenRequest = ApplicationUtils.createAccessTokenRequest(oAuthApplication, null);
tokenInfo = keyManager.getNewApplicationAccessToken(tokenRequest);
} else {
tokenInfo = new AccessTokenInfo();
tokenInfo.setAccessToken("");
tokenInfo.setValidityPeriod(0L);
String[] noScopes = new String[] {"N/A"};
tokenInfo.setScope(noScopes);
oAuthApplication.addParameter("tokenScope", Arrays.toString(noScopes));
}
Map<String, Object> keyDetails = new HashMap<String, Object>();
if (tokenInfo != null) {
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oAuthApplication.getClientId());
keyDetails.put("consumerSecret", oAuthApplication.getParameter("client_secret"));
keyDetails.put("appDetails", oAuthApplication.getJsonString());
return keyDetails;
}
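/*
* Illustrative usage sketch (assumed values; the variable "consumer" stands for any APIConsumer
* instance, for example one obtained from APIManagerFactory):
*
*   Map<String, Object> keys = consumer.mapExistingOAuthClient(
*           "{\"grant_types\":\"client_credentials\"}", "admin", "existingConsumerKey",
*           "DefaultApplication", "PRODUCTION", "DEFAULT");
*   String consumerKey = (String) keys.get("consumerKey");
*   String accessToken = (String) keys.get("accessToken");
*
* Because the example JSON advertises the client_credentials grant, a fresh application access
* token is requested from the KeyManager; otherwise an empty token placeholder is returned.
*/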
/**
* Returns the SubscribedAPI object related to the given subscription id.
*
* @param subscriptionId subscription id
* @return the SubscribedAPI corresponding to the subscription id
* @throws APIManagementException
*/
@Override
public SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionById(subscriptionId);
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber) throws APIManagementException {
return getSubscribedAPIs(subscriber, null);
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException {
Set<SubscribedAPI> originalSubscribedAPIs;
Set<SubscribedAPI> subscribedAPIs = new HashSet<SubscribedAPI>();
try {
originalSubscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, groupingId);
if (originalSubscribedAPIs != null && !originalSubscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : originalSubscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi.getTier().getName());
subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName(), e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getSubscribedAPIs(subscriber, applicationName, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
@Override
public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName,
int startSubIndex, int endSubIndex, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPIs = null;
try {
subscribedAPIs = apiMgtDAO.getPaginatedSubscribedAPIs(subscriber, applicationName, startSubIndex,
endSubIndex, groupingId);
if (subscribedAPIs != null && !subscribedAPIs.isEmpty()) {
Map<String, Tier> tiers = APIUtil.getTiers(tenantId);
for (SubscribedAPI subscribedApi : subscribedAPIs) {
Tier tier = tiers.get(subscribedApi.getTier().getName());
subscribedApi.getTier().setDisplayName(tier != null ? tier.getDisplayName() : subscribedApi
.getTier().getName());
// We do not need to add the modified object again.
// subscribedAPIs.add(subscribedApi);
}
}
} catch (APIManagementException e) {
handleException("Failed to get APIs of " + subscriber.getName() + " under application " + applicationName, e);
}
return subscribedAPIs;
}
public Integer getSubscriptionCount(Subscriber subscriber,String applicationName,String groupingId)
throws APIManagementException {
return apiMgtDAO.getSubscriptionCount(subscriber,applicationName,groupingId);
}
@Override
public Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException {
try {
return apiMgtDAO.getAPIByConsumerKey(accessToken);
} catch (APIManagementException e) {
handleException("Error while obtaining API from API key", e);
}
return null;
}
@Override
public boolean isSubscribed(APIIdentifier apiIdentifier, String userId)
throws APIManagementException {
boolean isSubscribed;
try {
isSubscribed = apiMgtDAO.isSubscribed(apiIdentifier, userId);
} catch (APIManagementException e) {
String msg = "Failed to check if user(" + userId + ") has subscribed to " + apiIdentifier;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return isSubscribed;
}
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
WorkflowResponse workflowResponse = null;
int subscriptionId;
String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userId);
if (APIConstants.PUBLISHED.equals(api.getStatus())) {
subscriptionId = apiMgtDAO.addSubscription(identifier, api.getContext(), applicationId,
APIConstants.SubscriptionStatus.ON_HOLD, tenantAwareUsername);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
WorkflowExecutor addSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
SubscriptionWorkflowDTO workflowDTO = new SubscriptionWorkflowDTO();
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(addSubscriptionWFExecutor.generateUUID());
workflowDTO.setWorkflowReference(String.valueOf(subscriptionId));
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
workflowDTO.setCallbackUrl(addSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setTierName(identifier.getTier());
workflowDTO.setApplicationName(apiMgtDAO.getApplicationNameFromId(applicationId));
workflowDTO.setApplicationId(applicationId);
workflowDTO.setSubscriber(userId);
workflowResponse = addSubscriptionWFExecutor.execute(workflowDTO);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the subscription entry.
apiMgtDAO.removeSubscriptionById(subscriptionId);
log.error("Could not execute Workflow", e);
throw new APIManagementException("Could not execute Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
//to handle on-the-fly subscription rejection (and removal of subscription entry from the database)
//the response should have {"Status":"REJECTED"} in the json payload for this to work.
boolean subscriptionRejected = false;
String subscriptionStatus = null;
String subscriptionUUID = "";
if (workflowResponse != null && workflowResponse.getJSONPayload() != null
&& !workflowResponse.getJSONPayload().isEmpty()) {
try {
JSONObject wfResponseJson = (JSONObject) new JSONParser().parse(workflowResponse.getJSONPayload());
if (APIConstants.SubscriptionStatus.REJECTED.equals(wfResponseJson.get("Status"))) {
subscriptionRejected = true;
subscriptionStatus = APIConstants.SubscriptionStatus.REJECTED;
}
} catch (ParseException e) {
log.error('\'' + workflowResponse.getJSONPayload() + "' is not a valid JSON.", e);
}
}
if (!subscriptionRejected) {
SubscribedAPI addedSubscription = getSubscriptionById(subscriptionId);
subscriptionStatus = addedSubscription.getSubStatus();
subscriptionUUID = addedSubscription.getUUID();
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
subsLogObject.put(APIConstants.AuditLogConstants.TIER, identifier.getTier());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
workflowResponse = new GeneralWorkflowResponse();
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion()
+ ", Subscription Status: " + subscriptionStatus + " subscribe by " + userId
+ " for app " + applicationName;
log.debug(logMessage);
}
return new SubscriptionResponse(subscriptionStatus, subscriptionUUID, workflowResponse);
} else {
throw new APIMgtResourceNotFoundException("Subscriptions not allowed on APIs in the state: " +
api.getStatus());
}
}
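/*
* Illustrative workflow response payload (assumed example) that triggers the on-the-fly
* rejection branch above; only the "Status" key is inspected, any other keys are ignored:
*
*   {"Status": "REJECTED", "Reason": "Requested tier is not permitted"}
*
* Any other payload, or an empty one, leaves the subscription in the state assigned by the
* workflow executor.
*/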
@Override
public SubscriptionResponse addSubscription(APIIdentifier identifier, String userId, int applicationId,
String groupId) throws APIManagementException {
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
return addSubscription(identifier, userId, applicationId);
}
/**
* Check whether the application is accessible to the specified user
* @param userId username
* @param applicationId application ID
* @param groupId GroupId list of the application
* @return true if the application is accessible by the specified user
*/
private boolean validateApplication(String userId, int applicationId, String groupId) {
try {
return apiMgtDAO.isAppAllowed(applicationId, userId, groupId);
} catch (APIManagementException e) {
log.error("Error occurred while getting user group id for user: " + userId, e);
}
return false;
}
@Override
public String getSubscriptionStatusById(int subscriptionId) throws APIManagementException {
return apiMgtDAO.getSubscriptionStatusById(subscriptionId);
}
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String providerTenantDomain = MultitenantUtils.getTenantDomain(APIUtil.
replaceEmailDomainBack(identifier.getProviderName()));
String applicationName = apiMgtDAO.getApplicationNameFromId(applicationId);
try {
if (providerTenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME
.equals(providerTenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(providerTenantDomain, true);
isTenantFlowStarted = true;
}
API api = getAPI(identifier);
SubscriptionWorkflowDTO workflowDTO;
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor removeSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
String workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(identifier, applicationId);
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new SubscriptionWorkflowDTO();
} else {
workflowDTO = (SubscriptionWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
// set tiername to the workflowDTO only when workflows are enabled
SubscribedAPI subscription = apiMgtDAO
.getSubscriptionById(Integer.parseInt(workflowDTO.getWorkflowReference()));
workflowDTO.setTierName(subscription.getTier().getName());
}
workflowDTO.setApiProvider(identifier.getProviderName());
workflowDTO.setApiContext(api.getContext());
workflowDTO.setApiName(identifier.getApiName());
workflowDTO.setApiVersion(identifier.getVersion());
workflowDTO.setApplicationName(applicationName);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
workflowDTO.setExternalWorkflowReference(workflowExtRef);
workflowDTO.setSubscriber(userId);
workflowDTO.setCallbackUrl(removeSubscriptionWFExecutor.getCallbackURL());
workflowDTO.setApplicationId(applicationId);
String status = apiMgtDAO.getSubscriptionStatus(identifier, applicationId);
if (APIConstants.SubscriptionStatus.ON_HOLD.equals(status)) {
try {
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the deletion process
log.warn("Failed to clean pending subscription approval task");
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setExternalWorkflowReference(removeSubscriptionWFExecutor.generateUUID());
removeSubscriptionWFExecutor.execute(workflowDTO);
JSONObject subsLogObject = new JSONObject();
subsLogObject.put(APIConstants.AuditLogConstants.API_NAME, identifier.getApiName());
subsLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_ID, applicationId);
subsLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, applicationName);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.SUBSCRIPTION, subsLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_DELETION +
" for apiID " + identifier.getApiName();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (APIUtil.isAPIGatewayKeyCacheEnabled()) {
invalidateCachedKeys(applicationId);
}
if (log.isDebugEnabled()) {
String logMessage = "API Name: " + identifier.getApiName() + ", API Version " +
identifier.getVersion() + " subscription removed from app " + applicationName + " by " + userId;
log.debug(logMessage);
}
}
@Override
public void removeSubscription(APIIdentifier identifier, String userId, int applicationId, String groupId) throws
APIManagementException {
//check application is viewable to logged user
boolean isValid = validateApplication(userId, applicationId, groupId);
if (!isValid) {
log.error("Application " + applicationId + " is not accessible to user " + userId);
throw new APIManagementException("Application is not accessible to user " + userId);
}
removeSubscription(identifier, userId, applicationId);
}
/**
* Removes a subscription specified by SubscribedAPI object
*
* @param subscription SubscribedAPI object
* @throws APIManagementException
*/
@Override
public void removeSubscription(SubscribedAPI subscription) throws APIManagementException {
String uuid = subscription.getUUID();
SubscribedAPI subscribedAPI = apiMgtDAO.getSubscriptionByUUID(uuid);
if (subscribedAPI != null) {
Application application = subscribedAPI.getApplication();
APIIdentifier identifier = subscribedAPI.getApiId();
String userId = application.getSubscriber().getName();
removeSubscription(identifier, userId, application.getId());
if (log.isDebugEnabled()) {
String appName = application.getName();
String logMessage =
"API Name: " + identifier.getApiName() + ", API Version " + identifier.getVersion() +
" subscription (uuid : " + uuid + ") removed from app " + appName;
log.debug(logMessage);
}
} else {
throw new APIManagementException("Subscription for UUID:" + uuid +" does not exist.");
}
}
/**
*
* @param applicationId Application ID related cache keys to be cleared
* @throws APIManagementException
*/
private void invalidateCachedKeys(int applicationId) throws APIManagementException {
CacheInvalidator.getInstance().invalidateCacheForApp(applicationId);
}
@Override
public void removeSubscriber(APIIdentifier identifier, String userId)
throws APIManagementException {
throw new UnsupportedOperationException("Unsubscribe operation is not yet implemented");
}
@Override
public void updateSubscriptions(APIIdentifier identifier, String userId, int applicationId)
throws APIManagementException {
API api = getAPI(identifier);
apiMgtDAO.updateSubscriptions(identifier, api.getContext(), applicationId, userId);
}
@Override
public void addComment(APIIdentifier identifier, String commentText, String user) throws APIManagementException {
apiMgtDAO.addComment(identifier, commentText, user);
}
@Override
public org.wso2.carbon.apimgt.api.model.Comment[] getComments(APIIdentifier identifier)
throws APIManagementException {
return apiMgtDAO.getComments(identifier);
}
/**
* Add a new Application from the store.
*
* @param application - {@link org.wso2.carbon.apimgt.api.model.Application}
* @param userId - {@link String}
* @return the id of the newly added application
*/
@Override
public int addApplication(Application application, String userId)
throws APIManagementException {
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
if (APIUtil.isApplicationExist(userId, application.getName(), application.getGroupId())) {
handleResourceAlreadyExistsException(
"A duplicate application already exists by the name - " + application.getName());
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(application.getCallbackUrl())) {
application.setCallbackUrl(null);
}
int applicationId = apiMgtDAO.addApplication(application, userId);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.CREATED, this.username);
boolean isTenantFlowStarted = false;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
try {
WorkflowExecutor appCreationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
ApplicationWorkflowDTO appWFDto = new ApplicationWorkflowDTO();
appWFDto.setApplication(application);
appWFDto.setExternalWorkflowReference(appCreationWFExecutor.generateUUID());
appWFDto.setWorkflowReference(String.valueOf(applicationId));
appWFDto.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
appWFDto.setCallbackUrl(appCreationWFExecutor.getCallbackURL());
appWFDto.setStatus(WorkflowStatus.CREATED);
appWFDto.setTenantDomain(tenantDomain);
appWFDto.setTenantId(tenantId);
appWFDto.setUserName(userId);
appWFDto.setCreatedTime(System.currentTimeMillis());
appCreationWFExecutor.execute(appWFDto);
} catch (WorkflowException e) {
//If the workflow execution fails, roll back transaction by removing the application entry.
application.setId(applicationId);
apiMgtDAO.deleteApplication(application);
log.error("Unable to execute Application Creation Workflow", e);
handleException("Unable to execute Application Creation Workflow", e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
log.debug("Application Name: " + application.getName() +" added successfully.");
}
return applicationId;
}
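/*
* Illustrative outcomes of the application name validation above (assumed values):
*   "My App_1.0-beta"  -> accepted (only letters, digits, spaces, '.', '_' and '-')
*   " MyApp"           -> rejected, leading whitespace fails the trim-length check
*   "My#App"           -> rejected, '#' is not matched by ^[a-zA-Z0-9 ._-]*$
*/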
/** Updates an Application identified by its id
*
* @param application Application object to be updated
* @throws APIManagementException
*/
@Override
public void updateApplication(Application application) throws APIManagementException {
Application existingApp;
String uuid = application.getUUID();
if (!StringUtils.isEmpty(uuid)) {
existingApp = apiMgtDAO.getApplicationByUUID(uuid);
if (existingApp != null) {
Set<APIKey> keys = getApplicationKeys(existingApp.getId());
for (APIKey key : keys) {
existingApp.addKey(key);
}
// copy the id inside the null check to avoid a NullPointerException when no application is found
application.setId(existingApp.getId());
}
} else {
existingApp = apiMgtDAO.getApplicationById(application.getId());
}
if (existingApp != null && APIConstants.ApplicationStatus.APPLICATION_CREATED.equals(existingApp.getStatus())) {
throw new APIManagementException("Cannot update the application while it is INACTIVE");
}
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().
equalsIgnoreCase(existingApp.getSubscriber().getName());
} else {
isUserAppOwner = application.getSubscriber().getName().equals(existingApp.getSubscriber().getName());
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to update application owned by: " + existingApp.getSubscriber().getName());
}
if (application.getName() != null && (application.getName().length() != application.getName().trim().length())) {
handleApplicationNameContainSpacesException("Application name " +
"cannot contain leading or trailing white spaces");
}
String regex = "^[a-zA-Z0-9 ._-]*$";
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(application.getName());
if (!matcher.find()) {
handleApplicationNameContainsInvalidCharactersException("Application name contains invalid characters");
}
apiMgtDAO.updateApplication(application);
if (log.isDebugEnabled()) {
log.debug("Successfully updated the Application: " + application.getId() +" in the database.");
}
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.STATUS, existingApp != null ? existingApp.getStatus() : "");
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
try {
invalidateCachedKeys(application.getId());
} catch (APIManagementException ignore) {
//Log and ignore since we do not want to throw exceptions to the front end due to cache invalidation failure.
log.warn("Failed to invalidate Gateway Cache " + ignore.getMessage(), ignore);
}
}
/**
* Function to remove an Application from the API Store
*
* @param application - The Application Object that represents the Application
* @param username
* @throws APIManagementException
*/
@Override
public void removeApplication(Application application, String username) throws APIManagementException {
String uuid = application.getUUID();
if (application.getId() == 0 && !StringUtils.isEmpty(uuid)) {
application = apiMgtDAO.getApplicationByUUID(uuid);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
}
boolean isTenantFlowStarted = false;
int applicationId = application.getId();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(username);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(username);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + username + ", " +
"attempted to remove application owned by: " + application.getSubscriber().getName());
}
try {
String workflowExtRef;
ApplicationWorkflowDTO workflowDTO;
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
isTenantFlowStarted = true;
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
WorkflowExecutor createApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
WorkflowExecutor createProductionRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
WorkflowExecutor createSandboxRegistrationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
WorkflowExecutor removeApplicationWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceByApplicationID(application.getId());
// in a normal flow workflowExtRef is null when workflows are not enabled
if (workflowExtRef == null) {
workflowDTO = new ApplicationWorkflowDTO();
} else {
workflowDTO = (ApplicationWorkflowDTO) apiMgtDAO.retrieveWorkflow(workflowExtRef);
}
workflowDTO.setApplication(application);
workflowDTO.setCallbackUrl(removeApplicationWFExecutor.getCallbackURL());
workflowDTO.setUserName(this.username);
workflowDTO.setTenantDomain(tenantDomain);
workflowDTO.setTenantId(tenantId);
// Remove from cache first since we won't be able to find active access tokens
// once the application is removed.
invalidateCachedKeys(application.getId());
// clean up pending subscription tasks
Set<Integer> pendingSubscriptions = apiMgtDAO.getPendingSubscriptionsByApplicationId(applicationId);
for (int subscription : pendingSubscriptions) {
try {
workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscription);
createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for subscription " + subscription);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending subscription approval task: " + subscription);
}
}
// cleanup pending application registration tasks
String productionKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
String sandboxKeyStatus = apiMgtDAO
.getRegistrationApprovalState(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (WorkflowStatus.CREATED.toString().equals(productionKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
createProductionRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for production key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending production key approval task of " + applicationId);
}
}
if (WorkflowStatus.CREATED.toString().equals(sandboxKeyStatus)) {
try {
workflowExtRef = apiMgtDAO
.getRegistrationWFReference(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
createSandboxRegistrationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (APIManagementException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to get external workflow reference for sandbox key of application "
+ applicationId);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending sandbox key approval task of " + applicationId);
}
}
if (workflowExtRef != null) {
try {
createApplicationWFExecutor.cleanUpPendingTask(workflowExtRef);
} catch (WorkflowException ex) {
// failed cleanup processes are ignored to prevent failing the application removal process
log.warn("Failed to clean pending application approval task of " + applicationId);
}
}
// update attributes of the new remove workflow to be created
workflowDTO.setStatus(WorkflowStatus.CREATED);
workflowDTO.setCreatedTime(System.currentTimeMillis());
workflowDTO.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION);
workflowDTO.setExternalWorkflowReference(removeApplicationWFExecutor.generateUUID());
removeApplicationWFExecutor.execute(workflowDTO);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.NAME, application.getName());
appLogObject.put(APIConstants.AuditLogConstants.TIER, application.getTier());
appLogObject.put(APIConstants.AuditLogConstants.CALLBACK, application.getCallbackUrl());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.DELETED, this.username);
} catch (WorkflowException e) {
String errorMsg = "Could not execute Workflow, " + WorkflowConstants.WF_TYPE_AM_APPLICATION_DELETION + " " +
"for applicationID " + application.getId();
handleException(errorMsg, e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
if (log.isDebugEnabled()) {
String logMessage = "Application Name: " + application.getName() + " successfully removed";
log.debug(logMessage);
}
}
/**
* This method is implemented specifically for the REST API, moving the application and data
* access logic out of the host object layer. The requested scopes are therefore passed to this
* method as tokenScope, and all scope related logic is handled here, so the host object only
* needs to pass the required nine parameters.
*/
@Override
public Map<String, Object> requestApprovalForApplicationRegistration(String userId, String applicationName,
String tokenType, String callbackUrl,
String[] allowedDomains, String validityTime,
String tokenScope, String groupingId,
String jsonString
)
throws APIManagementException {
boolean isTenantFlowStarted = false;
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = MultitenantConstants.INVALID_TENANT_ID;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Unable to retrieve the tenant information of the current user.", e);
}
//checking for authorized scopes
Set<Scope> scopeSet = new LinkedHashSet<Scope>();
List<Scope> authorizedScopes = new ArrayList<Scope>();
String authScopeString;
if (tokenScope != null && tokenScope.length() != 0 &&
!APIConstants.OAUTH2_DEFAULT_SCOPE.equals(tokenScope)) {
scopeSet.addAll(getScopesByScopeKeys(tokenScope, tenantId));
authorizedScopes = getAllowedScopesForUserApplication(userId, scopeSet);
}
if (!authorizedScopes.isEmpty()) {
Set<Scope> authorizedScopeSet = new HashSet<Scope>(authorizedScopes);
StringBuilder scopeBuilder = new StringBuilder();
for (Scope scope : authorizedScopeSet) {
scopeBuilder.append(scope.getKey()).append(' ');
}
authScopeString = scopeBuilder.toString();
} else {
authScopeString = APIConstants.OAUTH2_DEFAULT_SCOPE;
}
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
// initiate WorkflowExecutor
WorkflowExecutor appRegistrationWorkflow = null;
// initiate ApplicationRegistrationWorkflowDTO
ApplicationRegistrationWorkflowDTO appRegWFDto = null;
ApplicationKeysDTO appKeysDto = new ApplicationKeysDTO();
// get APIM application by Application Name and userId.
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = application.getSubscriber().getName().equalsIgnoreCase(userId);
} else {
isUserAppOwner = application.getSubscriber().getName().equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + application.getSubscriber().getName() + ", " +
"attempted to generate tokens for application owned by: " + userId);
}
// if it is a PRODUCTION application.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
// initiate workflow type. By default, the simple workflow will be executed.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) { // if it is a SANDBOX application.
appRegistrationWorkflow =
getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
appRegWFDto =
(ApplicationRegistrationWorkflowDTO) WorkflowExecutorFactory.getInstance()
.createWorkflowDTO(WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
} else {
throw new APIManagementException("Invalid Token Type '" + tokenType + "' requested.");
}
//check whether callback url is empty and set null
if (StringUtils.isBlank(callbackUrl)) {
callbackUrl = null;
}
String applicationTokenType = application.getTokenType();
if (StringUtils.isEmpty(application.getTokenType())) {
applicationTokenType = APIConstants.DEFAULT_TOKEN_TYPE;
}
// Build key manager instance and create oAuthAppRequest by jsonString.
OAuthAppRequest request =
ApplicationUtils.createOauthAppRequest(applicationName, null,
callbackUrl, authScopeString, jsonString, applicationTokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.VALIDITY_PERIOD, validityTime);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
request.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_CALLBACK_URL, callbackUrl);
// Setting request values in WorkflowDTO - In future we should keep
// Application/OAuthApplication related
// information in the respective entities not in the workflowDTO.
appRegWFDto.setStatus(WorkflowStatus.CREATED);
appRegWFDto.setCreatedTime(System.currentTimeMillis());
appRegWFDto.setTenantDomain(tenantDomain);
appRegWFDto.setTenantId(tenantId);
appRegWFDto.setExternalWorkflowReference(appRegistrationWorkflow.generateUUID());
appRegWFDto.setWorkflowReference(appRegWFDto.getExternalWorkflowReference());
appRegWFDto.setApplication(application);
request.setMappingId(appRegWFDto.getWorkflowReference());
if (!application.getSubscriber().getName().equals(userId)) {
appRegWFDto.setUserName(application.getSubscriber().getName());
} else {
appRegWFDto.setUserName(userId);
}
appRegWFDto.setCallbackUrl(appRegistrationWorkflow.getCallbackURL());
appRegWFDto.setAppInfoDTO(request);
appRegWFDto.setDomainList(allowedDomains);
appRegWFDto.setKeyDetails(appKeysDto);
appRegistrationWorkflow.execute(appRegWFDto);
Map<String, Object> keyDetails = new HashMap<String, Object>();
keyDetails.put("keyState", appRegWFDto.getStatus().toString());
OAuthApplicationInfo applicationInfo = appRegWFDto.getApplicationInfo();
if (applicationInfo != null) {
keyDetails.put("consumerKey", applicationInfo.getClientId());
keyDetails.put("consumerSecret", applicationInfo.getClientSecret());
keyDetails.put("appDetails", applicationInfo.getJsonString());
}
// There can be instances where generating the Application Token is not required.
// In those cases, the token info will be empty.
AccessTokenInfo tokenInfo = appRegWFDto.getAccessTokenInfo();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", tokenInfo.getValidityPeriod());
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
keyDetails.put("tokenScope", tokenInfo.getScopes());
}
JSONObject appLogObject = new JSONObject();
appLogObject.put("Generated keys for application", application.getName());
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return keyDetails;
} catch (WorkflowException e) {
log.error("Could not execute Workflow", e);
throw new APIManagementException(e);
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
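/*
* Illustrative usage sketch (assumed values; "consumer" stands for any APIConsumer instance):
*
*   Map<String, Object> keyDetails = consumer.requestApprovalForApplicationRegistration(
*           "admin", "DefaultApplication", "PRODUCTION", null,
*           new String[]{"ALL"}, "3600", "default", null, "{}");
*
* The returned map always carries "keyState"; when the registration workflow completes
* immediately it also carries "consumerKey", "consumerSecret", "appDetails" and, if a token
* was generated, "accessToken", "validityTime", "tokenDetails" and "tokenScope".
*/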
private static List<Scope> getAllowedScopesForUserApplication(String username,
Set<Scope> reqScopeSet) {
String[] userRoles = null;
org.wso2.carbon.user.api.UserStoreManager userStoreManager = null;
List<Scope> authorizedScopes = new ArrayList<Scope>();
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
userRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername(username));
} catch (org.wso2.carbon.user.api.UserStoreException e) {
// Log and return since we do not want to stop issuing the token in
// case of scope validation failures.
log.error("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
List<String> userRoleList;
if (userRoles != null) {
userRoleList = new ArrayList<String>(Arrays.asList(userRoles));
} else {
userRoleList = Collections.emptyList();
}
//Iterate the requested scopes list.
for (Scope scope : reqScopeSet) {
//Get the set of roles associated with the requested scope.
String roles = scope.getRoles();
//If the scope has been defined in the context of the App and if roles have been defined for the scope
if (roles != null && roles.length() != 0) {
List<String> roleList =
new ArrayList<String>(Arrays.asList(roles.replaceAll(" ", EMPTY_STRING).split(",")));
//Check if user has at least one of the roles associated with the scope
roleList.retainAll(userRoleList);
if (!roleList.isEmpty()) {
authorizedScopes.add(scope);
}
}
}
return authorizedScopes;
}
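/*
* Worked example of the role intersection above (assumed values): a scope whose roles are
* "admin,Internal/subscriber" is authorized for a user holding ["Internal/subscriber",
* "Internal/everyone"], because the retained intersection ["Internal/subscriber"] is non-empty.
* A scope restricted to "admin" alone would be dropped for the same user, and a scope with no
* roles defined is never added to the authorized list.
*/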
@Override
public Map<String, String> completeApplicationRegistration(String userId, String applicationName, String tokenType,
String tokenScope, String groupingId)
throws APIManagementException {
Application application = apiMgtDAO.getApplicationByName(applicationName, userId, groupingId);
String status = apiMgtDAO.getRegistrationApprovalState(application.getId(), tokenType);
Map<String, String> keyDetails = null;
if (!application.getSubscriber().getName().equals(userId)) {
userId = application.getSubscriber().getName();
}
String workflowReference = apiMgtDAO.getWorkflowReference(applicationName, userId);
if (workflowReference != null) {
WorkflowDTO workflowDTO = null;
// Creating workflowDTO for the correct key type.
if (APIConstants.API_KEY_TYPE_PRODUCTION.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_PRODUCTION);
} else if (APIConstants.API_KEY_TYPE_SANDBOX.equals(tokenType)) {
workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(
WorkflowConstants.WF_TYPE_AM_APPLICATION_REGISTRATION_SANDBOX);
}
if (workflowDTO != null) {
// Set the workflow reference in the workflow dto and the populate method will fill in other details
// using the persisted request.
ApplicationRegistrationWorkflowDTO registrationWorkflowDTO = (ApplicationRegistrationWorkflowDTO)
workflowDTO;
registrationWorkflowDTO.setExternalWorkflowReference(workflowReference);
if (APIConstants.AppRegistrationStatus.REGISTRATION_APPROVED.equals(status)) {
apiMgtDAO.populateAppRegistrationWorkflowDTO(registrationWorkflowDTO);
try {
AbstractApplicationRegistrationWorkflowExecutor.dogenerateKeysForApplication
(registrationWorkflowDTO);
AccessTokenInfo tokenInfo = registrationWorkflowDTO.getAccessTokenInfo();
OAuthApplicationInfo oauthApp = registrationWorkflowDTO.getApplicationInfo();
keyDetails = new HashMap<String, String>();
if (tokenInfo != null) {
keyDetails.put("accessToken", tokenInfo.getAccessToken());
keyDetails.put("validityTime", Long.toString(tokenInfo.getValidityPeriod()));
keyDetails.put("tokenDetails", tokenInfo.getJSONString());
}
keyDetails.put("consumerKey", oauthApp.getClientId());
keyDetails.put("consumerSecret", oauthApp.getClientSecret());
keyDetails.put("appDetails", oauthApp.getJsonString());
} catch (APIManagementException e) {
APIUtil.handleException("Error occurred while Creating Keys.", e);
}
}
}
}
return keyDetails;
}
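/*
* Note (sketch of the behaviour above, not additional logic): key details are returned only
* when the registration workflow for the requested key type has reached REGISTRATION_APPROVED;
* for a pending or missing registration the method returns null, which callers should treat
* as "keys not yet generated".
*/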
/**
* Returns the application with the given name owned by the given subscriber.
*
* @param userId APIM subscriber user ID.
* @param ApplicationName APIM application name.
* @param groupingId group id of the application.
* @return the matching Application with its keys populated, or null if none exists.
* @throws APIManagementException
*/
@Override
public Application getApplicationsByName(String userId, String ApplicationName, String groupingId) throws
APIManagementException {
Application application = apiMgtDAO.getApplicationByName(ApplicationName, userId,groupingId);
if (application != null) {
checkAppAttributes(application, userId);
}
application = apiMgtDAO.getApplicationWithOAuthApps(ApplicationName, userId, groupingId);
if (application != null) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return application;
}
/**
* Returns the corresponding application given the Id
* @param id Id of the Application
* @return the Application corresponding to the given id.
* @throws APIManagementException
*/
@Override
public Application getApplicationById(int id) throws APIManagementException {
Application application = apiMgtDAO.getApplicationById(id);
String userId = application.getSubscriber().getName();
checkAppAttributes(application, userId);
// return the already fetched application instead of querying the DAO a second time
return application;
}
/**
* Gets the status of the application creation process for the given application id.
*
* @param applicationId Id of the Application
* @return the status of the application creation workflow
* @throws APIManagementException
*/
@Override
public String getApplicationStatusById(int applicationId) throws APIManagementException {
return apiMgtDAO.getApplicationStatusById(applicationId);
}
@Override
public boolean isApplicationTokenExists(String accessToken) throws APIManagementException {
return apiMgtDAO.isAccessTokenExists(accessToken);
}
@Override
public Set<SubscribedAPI> getSubscribedIdentifiers(Subscriber subscriber, APIIdentifier identifier, String groupingId)
throws APIManagementException {
Set<SubscribedAPI> subscribedAPISet = new HashSet<>();
Set<SubscribedAPI> subscribedAPIs = getSubscribedAPIs(subscriber, groupingId);
for (SubscribedAPI api : subscribedAPIs) {
if (api.getApiId().equals(identifier)) {
Set<APIKey> keys = getApplicationKeys(api.getApplication().getId());
for (APIKey key : keys) {
api.addKey(key);
}
subscribedAPISet.add(api);
}
}
return subscribedAPISet;
}
/**
* Returns the set of tier names denied for the current user.
*
* @return Set<String> of denied tier names
*/
@Override
public Set<String> getDeniedTiers() throws APIManagementException {
// '0' is passed as argument whenever tenant id of logged in user is needed
return getDeniedTiers(0);
}
/**
* Returns the set of tier names denied for the current user in the given tenant.
*
* @param apiProviderTenantId tenant id of the API provider
* @return Set<String> of denied tier names
*/
@Override
public Set<String> getDeniedTiers(int apiProviderTenantId) throws APIManagementException {
Set<String> deniedTiers = new HashSet<String>();
String[] currentUserRoles;
if (apiProviderTenantId == 0) {
apiProviderTenantId = tenantId;
}
try {
if (apiProviderTenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry)
.getUserName());
Set<TierPermissionDTO> tierPermissions;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissions = apiMgtDAO.getThrottleTierPermissions(apiProviderTenantId);
} else {
tierPermissions = apiMgtDAO.getTierPermissions(apiProviderTenantId);
}
for (TierPermissionDTO tierPermission : tierPermissions) {
String type = tierPermission.getPermissionType();
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(type)) {
/* Current User is not allowed for this Tier*/
if (currentRolesList.isEmpty()) {
deniedTiers.add(tierPermission.getTierName());
}
} else {
/* Current User is denied for this Tier*/
if (currentRolesList.size() > 0) {
deniedTiers.add(tierPermission.getTierName());
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return deniedTiers;
}
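/*
* Worked example of the tier permission evaluation above (assumed values): an ALLOW permission
* with roles ["gold-subscriber"] denies the tier to a user holding only ["Internal/subscriber"]
* because the role intersection is empty, while a DENY permission with the same roles denies
* the tier only to users who actually hold "gold-subscriber".
*/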
@Override
public Set<TierPermission> getTierPermissions() throws APIManagementException {
Set<TierPermission> tierPermissions = new HashSet<TierPermission>();
if (tenantId != 0) {
Set<TierPermissionDTO> tierPermissionDtos;
if (APIUtil.isAdvanceThrottlingEnabled()) {
tierPermissionDtos = apiMgtDAO.getThrottleTierPermissions(tenantId);
} else {
tierPermissionDtos = apiMgtDAO.getTierPermissions(tenantId);
}
for (TierPermissionDTO tierDto : tierPermissionDtos) {
TierPermission tierPermission = new TierPermission(tierDto.getTierName());
tierPermission.setRoles(tierDto.getRoles());
tierPermission.setPermissionType(tierDto.getPermissionType());
tierPermissions.add(tierPermission);
}
}
return tierPermissions;
}
/**
* Checks whether the given tier is denied for the current user.
*
* @param tierName name of the tier
* @return true if the tier is denied for the user, false otherwise
* @throws APIManagementException if failed to get the tiers
*/
@Override
public boolean isTierDeneid(String tierName) throws APIManagementException {
String[] currentUserRoles;
try {
if (tenantId != 0) {
/* Get the roles of the Current User */
currentUserRoles = ((UserRegistry) ((UserAwareAPIConsumer) this).registry).
getUserRealm().getUserStoreManager().getRoleListOfUser(((UserRegistry) this.registry).getUserName());
TierPermissionDTO tierPermission;
if(APIUtil.isAdvanceThrottlingEnabled()){
tierPermission = apiMgtDAO.getThrottleTierPermission(tierName, tenantId);
}else{
tierPermission = apiMgtDAO.getTierPermission(tierName, tenantId);
}
if (tierPermission == null) {
return false;
} else {
List<String> currentRolesList = new ArrayList<String>(Arrays.asList(currentUserRoles));
List<String> roles = new ArrayList<String>(Arrays.asList(tierPermission.getRoles()));
currentRolesList.retainAll(roles);
if (APIConstants.TIER_PERMISSION_ALLOW.equals(tierPermission.getPermissionType())) {
if (currentRolesList.isEmpty()) {
return true;
}
} else {
if (currentRolesList.size() > 0) {
return true;
}
}
}
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
log.error("cannot retrieve user role list for tenant" + tenantDomain, e);
}
return false;
}
private boolean isTenantDomainNotMatching(String tenantDomain) {
if (this.tenantDomain != null) {
return !(this.tenantDomain.equals(tenantDomain));
}
return true;
}
@Override
public Set<API> searchAPI(String searchTerm, String searchType, String tenantDomain)
throws APIManagementException {
return null;
}
public Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers)
throws APIManagementException {
return apiMgtDAO.getScopesBySubscribedAPIs(identifiers);
}
public String getScopesByToken(String accessToken) throws APIManagementException {
return null;
}
public Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId)
throws APIManagementException {
return apiMgtDAO.getScopesByScopeKeys(scopeKeys, tenantId);
}
@Override
public String getGroupId(int appId) throws APIManagementException {
return apiMgtDAO.getGroupId(appId);
}
@Override
public String[] getGroupIds(String response) throws APIManagementException {
String groupingExtractorClass = APIUtil.getGroupingExtractorImplementation();
if (groupingExtractorClass != null) {
try {
LoginPostExecutor groupingExtractor = (LoginPostExecutor) APIUtil.getClassForName
(groupingExtractorClass).newInstance();
// switching between the 2.1.0 (single group id) and 2.2.0 (multiple group ids) behaviour
if (APIUtil.isMultiGroupAppSharingEnabled()) {
NewPostLoginExecutor newGroupIdListExtractor = (NewPostLoginExecutor) groupingExtractor;
return newGroupIdListExtractor.getGroupingIdentifierList(response);
} else {
String groupId = groupingExtractor.getGroupingIdentifiers(response);
return new String[] {groupId};
}
} catch (ClassNotFoundException e) {
String msg = groupingExtractorClass + " is not found in runtime";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (ClassCastException e) {
String msg = "Cannot cast " + groupingExtractorClass + " NewPostLoginExecutor";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (IllegalAccessException e) {
String msg = "Error occurred while invocation of getGroupingIdentifier method";
log.error(msg, e);
throw new APIManagementException(msg, e);
} catch (InstantiationException e) {
String msg = "Error occurred while instantiating " + groupingExtractorClass + " class";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
return null;
}
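/*
* Illustrative sketch (assumed values): with multi group app sharing enabled, a custom
* NewPostLoginExecutor may return several group ids extracted from the login response, e.g.
* new String[]{"org1", "org1/dev"}; otherwise the single value returned by
* getGroupingIdentifiers(response) is wrapped in a one-element array.
*/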
/**
* Returns all applications associated with given subscriber, groupingId and search criteria.
*
* @param subscriber Subscriber
* @param groupingId The groupId to which the applications must belong.
* @param start The start index.
* @param offset The offset.
* @param search The search string.
* @param sortColumn The sort column.
* @param sortOrder The sort order.
* @return Application[] The Applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplicationsWithPagination(Subscriber subscriber, String groupingId, int start , int offset
, String search, String sortColumn, String sortOrder)
throws APIManagementException {
return apiMgtDAO.getApplicationsWithPagination(subscriber, groupingId, start, offset,
search, sortColumn, sortOrder);
}
/**
* Returns all applications associated with given subscriber and groupingId.
*
* @param subscriber The subscriber.
* @param groupingId The groupId to which the applications must belong.
* @return Application[] Array of applications.
* @throws APIManagementException
*/
@Override
public Application[] getApplications(Subscriber subscriber, String groupingId)
throws APIManagementException {
Application[] applications = apiMgtDAO.getApplications(subscriber, groupingId);
for (Application application : applications) {
Set<APIKey> keys = getApplicationKeys(application.getId());
for (APIKey key : keys) {
application.addKey(key);
}
}
return applications;
}
/**
* Returns all API keys associated with given application id.
*
* @param applicationId The id of the application.
* @return Set<APIKey> Set of API keys of the application.
* @throws APIManagementException
*/
protected Set<APIKey> getApplicationKeys(int applicationId) throws APIManagementException {
Set<APIKey> apiKeys = new HashSet<APIKey>();
APIKey productionKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_PRODUCTION);
if (productionKey != null) {
apiKeys.add(productionKey);
} else {
productionKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_PRODUCTION, applicationId);
if (productionKey != null) {
productionKey.setType(APIConstants.API_KEY_TYPE_PRODUCTION);
apiKeys.add(productionKey);
}
}
APIKey sandboxKey = getApplicationKey(applicationId, APIConstants.API_KEY_TYPE_SANDBOX);
if (sandboxKey != null) {
apiKeys.add(sandboxKey);
} else {
sandboxKey = apiMgtDAO.getKeyStatusOfApplication(APIConstants.API_KEY_TYPE_SANDBOX, applicationId);
if (sandboxKey != null) {
sandboxKey.setType(APIConstants.API_KEY_TYPE_SANDBOX);
apiKeys.add(sandboxKey);
}
}
return apiKeys;
}
/**
* Returns the key associated with given application id and key type.
*
* @param applicationId Id of the Application.
* @param keyType The type of key.
* @return APIKey The key of the application.
* @throws APIManagementException
*/
protected APIKey getApplicationKey(int applicationId, String keyType) throws APIManagementException {
String consumerKey = apiMgtDAO.getConsumerkeyByApplicationIdAndKeyType(String.valueOf(applicationId), keyType);
if (StringUtils.isNotEmpty(consumerKey)) {
String consumerKeyStatus = apiMgtDAO.getKeyStatusOfApplication(keyType, applicationId).getState();
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
OAuthApplicationInfo oAuthApplicationInfo = keyManager.retrieveApplication(consumerKey);
AccessTokenInfo tokenInfo = keyManager.getAccessTokenByConsumerKey(consumerKey);
APIKey apiKey = new APIKey();
apiKey.setConsumerKey(consumerKey);
apiKey.setType(keyType);
apiKey.setState(consumerKeyStatus);
if (oAuthApplicationInfo != null) {
apiKey.setConsumerSecret(oAuthApplicationInfo.getClientSecret());
apiKey.setCallbackUrl(oAuthApplicationInfo.getCallBackURL());
if (oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES) != null) {
apiKey.setGrantTypes(oAuthApplicationInfo.getParameter(APIConstants.JSON_GRANT_TYPES).toString());
}
}
if (tokenInfo != null) {
apiKey.setAccessToken(tokenInfo.getAccessToken());
apiKey.setValidityPeriod(tokenInfo.getValidityPeriod());
apiKey.setTokenScope(getScopeString(tokenInfo.getScopes()));
} else {
if (log.isDebugEnabled()) {
log.debug("Access token does not exist for Consumer Key: " + consumerKey);
}
}
return apiKey;
}
if (log.isDebugEnabled()) {
log.debug("Consumer key does not exist for Application Id: " + applicationId + " Key Type: " + keyType);
}
return null;
}
/**
* Returns a single string containing the provided array of scopes.
*
* @param scopes The array of scopes.
* @return String Single string containing the provided array of scopes.
*/
private String getScopeString(String[] scopes) {
return StringUtils.join(scopes, " ");
}
@Override
public Application[] getLightWeightApplications(Subscriber subscriber, String groupingId) throws
APIManagementException {
return apiMgtDAO.getLightWeightApplications(subscriber, groupingId);
}
/**
* @param userId Subscriber name.
* @param applicationName Name of the Application.
* @param tokenType Token type (PRODUCTION | SANDBOX).
* @param callbackUrl Callback URL for the Application.
* @param allowedDomains Allowed domains for the token.
* @param validityTime Token validity period.
* @param tokenScope Scopes for the requested tokens.
* @param groupingId The groupId to which the application belongs.
* @param jsonString JSON string containing additional properties of the OAuth application (for example, the requested grant types).
* @return OAuthApplicationInfo of the updated OAuth application.
* @throws APIManagementException
*/
@Override
public OAuthApplicationInfo updateAuthClient(String userId, String applicationName,
String tokenType,
String callbackUrl, String[] allowedDomains,
String validityTime,
String tokenScope,
String groupingId,
String jsonString) throws APIManagementException {
boolean tenantFlowStarted = false;
try {
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
tenantFlowStarted = true;
}
Application application = ApplicationUtils.retrieveApplication(applicationName, userId, groupingId);
final String subscriberName = application.getSubscriber().getName();
boolean isCaseInsensitiveComparisons = Boolean.parseBoolean(getAPIManagerConfiguration().
getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS));
boolean isUserAppOwner;
if (isCaseInsensitiveComparisons) {
isUserAppOwner = subscriberName.equalsIgnoreCase(userId);
} else {
isUserAppOwner = subscriberName.equals(userId);
}
if (!isUserAppOwner) {
throw new APIManagementException("user: " + userId + ", attempted to update OAuth application " +
"owned by: " + subscriberName);
}
//Create OauthAppRequest object by passing json String.
OAuthAppRequest oauthAppRequest = ApplicationUtils.createOauthAppRequest(applicationName, null, callbackUrl,
tokenScope, jsonString, application.getTokenType());
oauthAppRequest.getOAuthApplicationInfo().addParameter(ApplicationConstants.APP_KEY_TYPE, tokenType);
String consumerKey = apiMgtDAO.getConsumerKeyForApplicationKeyType(applicationName, userId, tokenType,
groupingId);
oauthAppRequest.getOAuthApplicationInfo().setClientId(consumerKey);
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//call update method.
OAuthApplicationInfo updatedAppInfo = keyManager.updateApplication(oauthAppRequest);
JSONObject appLogObject = new JSONObject();
appLogObject.put(APIConstants.AuditLogConstants.APPLICATION_NAME, updatedAppInfo.getClientName());
appLogObject.put("Updated Oauth app with Call back URL", callbackUrl);
appLogObject.put("Updated Oauth app with grant types", jsonString);
APIUtil.logAuditMessage(APIConstants.AuditLogConstants.APPLICATION, appLogObject.toString(),
APIConstants.AuditLogConstants.UPDATED, this.username);
return updatedAppInfo;
} finally {
if (tenantFlowStarted) {
endTenantFlow();
}
}
}
/**
* This method deletes the OAuth application associated with the given consumer key.
*
* @param consumerKey Consumer key of the OAuth application to be deleted.
* @throws APIManagementException
*/
@Override
public void deleteOAuthApplication(String consumerKey) throws APIManagementException {
//get key manager instance.
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
//delete oAuthApplication by calling key manager implementation
keyManager.deleteApplication(consumerKey);
Map<String, String> applicationIdAndTokenTypeMap =
apiMgtDAO.getApplicationIdAndTokenTypeByConsumerKey(consumerKey);
if (applicationIdAndTokenTypeMap != null) {
String applicationId = applicationIdAndTokenTypeMap.get("application_id");
String tokenType = applicationIdAndTokenTypeMap.get("token_type");
if (applicationId != null && tokenType != null) {
apiMgtDAO.deleteApplicationKeyMappingByConsumerKey(consumerKey);
apiMgtDAO.deleteApplicationRegistration(applicationId, tokenType);
}
}
}
@Override
public Application[] getApplicationsByOwner(String userId) throws APIManagementException {
return apiMgtDAO.getApplicationsByOwner(userId);
}
@Override
public boolean updateApplicationOwner(String userId, Application application) throws APIManagementException {
boolean isAppUpdated = false;
try {
RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(MultitenantUtils.getTenantDomain(username));
UserStoreManager userStoreManager = realmService.getTenantUserRealm(tenantId).getUserStoreManager();
String oldUserName = application.getSubscriber().getName();
String[] oldUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(oldUserName));
String[] newUserRoles = userStoreManager.getRoleListOfUser(MultitenantUtils.getTenantAwareUsername
(userId));
List<String> roleList = new ArrayList<String>();
roleList.addAll(Arrays.asList(newUserRoles));
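// Carry over the application-specific roles (role names containing the application name) from the previous
// owner into the new owner's role list before updating the user's roles.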
for (String role : oldUserRoles) {
if (role.contains(application.getName())) {
roleList.add(role);
}
}
String[] roleArr = roleList.toArray(new String[roleList.size()]);
APIManagerConfiguration config = getAPIManagerConfiguration();
String serverURL = config.getFirstProperty(APIConstants.AUTH_MANAGER_URL) + "UserAdmin";
String adminUsername = config.getFirstProperty(APIConstants.AUTH_MANAGER_USERNAME);
String adminPassword = config.getFirstProperty(APIConstants.AUTH_MANAGER_PASSWORD);
UserAdminStub userAdminStub = new UserAdminStub(serverURL);
CarbonUtils.setBasicAccessSecurityHeaders(adminUsername, adminPassword, userAdminStub._getServiceClient());
userAdminStub.updateRolesOfUser(userId, roleArr);
isAppUpdated = true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
} catch (RemoteException e) {
handleException("Server couldn't establish connection with auth manager ", e);
} catch (UserAdminUserAdminException e) {
handleException("Error when getting the tenant's UserStoreManager or when getting roles of user ", e);
}
if (isAppUpdated) {
isAppUpdated = apiMgtDAO.updateApplicationOwner(userId, application);
}
//TODO: update the OAuth application once the OAuth component supports updating the owner
return isAppUpdated;
}
public JSONObject resumeWorkflow(Object[] args) {
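// Expected args: [0] workflow reference id, [1] new status (APPROVED or REJECTED), [2] optional description.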
JSONObject row = new JSONObject();
if (args != null && APIUtil.isStringArray(args)) {
String workflowReference = (String) args[0];
String status = (String) args[1];
String description = null;
if (args.length > 2 && args[2] != null) {
description = (String) args[2];
}
boolean isTenantFlowStarted = false;
try {
// if (workflowReference != null) {
WorkflowDTO workflowDTO = apiMgtDAO.retrieveWorkflow(workflowReference);
if (workflowDTO == null) {
log.error("Could not find workflow for reference " + workflowReference);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", "Could not find workflow for reference " + workflowReference);
return row;
}
String tenantDomain = workflowDTO.getTenantDomain();
if (tenantDomain != null && !org.wso2.carbon.utils.multitenancy.MultitenantConstants
.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = startTenantFlowForTenantDomain(tenantDomain);
}
workflowDTO.setWorkflowDescription(description);
workflowDTO.setStatus(WorkflowStatus.valueOf(status));
String workflowType = workflowDTO.getWorkflowType();
WorkflowExecutor workflowExecutor;
try {
workflowExecutor = getWorkflowExecutor(workflowType);
workflowExecutor.complete(workflowDTO);
} catch (WorkflowException e) {
throw new APIManagementException(e);
}
row.put("error", Boolean.FALSE);
row.put("statusCode", 200);
row.put("message", "Invoked workflow completion successfully.");
// }
} catch (IllegalArgumentException e) {
String msg = "Illegal argument provided. Valid values for status are APPROVED and REJECTED.";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg);
} catch (APIManagementException e) {
String msg = "Error while resuming the workflow. ";
log.error(msg, e);
row.put("error", Boolean.TRUE);
row.put("statusCode", 500);
row.put("message", msg + e.getMessage());
} finally {
if (isTenantFlowStarted) {
endTenantFlow();
}
}
}
return row;
}
protected void endTenantFlow() {
PrivilegedCarbonContext.endTenantFlow();
}
protected boolean startTenantFlowForTenantDomain(String tenantDomain) {
boolean isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
return isTenantFlowStarted;
}
/**
* Returns a workflow executor
*
* @param workflowType Workflow executor type
* @return WorkflowExecutor of given type
* @throws WorkflowException if an error occurred while getting WorkflowExecutor
*/
protected WorkflowExecutor getWorkflowExecutor(String workflowType) throws WorkflowException {
return WorkflowExecutorFactory.getInstance().getWorkflowExecutor(workflowType);
}
@Override
public boolean isMonetizationEnabled(String tenantDomain) throws APIManagementException {
JSONObject apiTenantConfig = null;
try {
String content = apimRegistryService.getConfigRegistryResourceContent(tenantDomain, APIConstants.API_TENANT_CONF_LOCATION);
if (content != null) {
JSONParser parser = new JSONParser();
apiTenantConfig = (JSONObject) parser.parse(content);
}
} catch (UserStoreException e) {
handleException("UserStoreException thrown when getting API tenant config from registry", e);
} catch (RegistryException e) {
handleException("RegistryException thrown when getting API tenant config from registry", e);
} catch (ParseException e) {
handleException("ParseException thrown when passing API tenant config from registry", e);
}
return getTenantConfigValue(tenantDomain, apiTenantConfig, APIConstants.API_TENANT_CONF_ENABLE_MONITZATION_KEY);
}
private boolean getTenantConfigValue(String tenantDomain, JSONObject apiTenantConfig, String configKey) throws APIManagementException {
if (apiTenantConfig != null) {
Object value = apiTenantConfig.get(configKey);
if (value != null) {
return Boolean.parseBoolean(value.toString());
}
else {
throw new APIManagementException(configKey + " config does not exist for tenant " + tenantDomain);
}
}
return false;
}
/**
* To get the query used to restrict search results based on the current user's role list.
*
* @return the query with user role list.
* @throws APIManagementException API Management Exception.
*/
private String getUserRoleListQuery() throws APIManagementException {
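// Builds a clause of the form STORE_VIEW_ROLES=(NULL_USER_ROLE_LIST OR role1 OR role2 ...), with each role
// lower-cased, sanitized and Solr-escaped before being appended.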
StringBuilder rolesQuery = new StringBuilder();
rolesQuery.append('(');
rolesQuery.append(APIConstants.NULL_USER_ROLE_LIST);
String[] userRoles = APIUtil.getListOfRoles((userNameWithoutChange != null)? userNameWithoutChange: username);
if (userRoles != null) {
for (String userRole : userRoles) {
rolesQuery.append(" OR ");
rolesQuery.append(ClientUtils.escapeQueryChars(APIUtil.sanitizeUserRole(userRole.toLowerCase())));
}
}
rolesQuery.append(")");
if(log.isDebugEnabled()) {
log.debug("User role list solr query " + APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString());
}
return APIConstants.STORE_VIEW_ROLES + "=" + rolesQuery.toString();
}
/**
* To get the current user's role list.
*
* @return user role list.
* @throws APIManagementException API Management Exception.
*/
private List<String> getUserRoleList() throws APIManagementException {
List<String> userRoleList;
if (userNameWithoutChange == null) {
userRoleList = new ArrayList<String>() {{
add(APIConstants.NULL_USER_ROLE_LIST);
}};
} else {
userRoleList = new ArrayList<String>(Arrays.asList(APIUtil.getListOfRoles(userNameWithoutChange)));
}
return userRoleList;
}
@Override
protected String getSearchQuery(String searchQuery) throws APIManagementException {
if (!isAccessControlRestrictionEnabled || ( userNameWithoutChange != null &&
APIUtil.hasPermission(userNameWithoutChange, APIConstants.Permissions
.APIM_ADMIN))) {
return searchQuery;
}
String criteria = getUserRoleListQuery();
if (searchQuery != null && !searchQuery.trim().isEmpty()) {
criteria = criteria + "&" + searchQuery;
}
return criteria;
}
@Override
public String getWSDLDocument(String username, String tenantDomain, String resourceUrl,
Map environmentDetails, Map apiDetails) throws APIManagementException {
if (username == null) {
username = APIConstants.END_USER_ANONYMOUS;
}
if (tenantDomain == null) {
tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
Map<String, Object> docResourceMap = APIUtil.getDocument(username, resourceUrl, tenantDomain);
String wsdlContent = "";
if (log.isDebugEnabled()) {
log.debug("WSDL document resource availability: " + docResourceMap.isEmpty());
}
if (!docResourceMap.isEmpty()) {
try {
ByteArrayOutputStream arrayOutputStream = new ByteArrayOutputStream();
IOUtils.copy((InputStream) docResourceMap.get("Data"), arrayOutputStream);
String apiName = (String) apiDetails.get(API_NAME);
String apiVersion = (String) apiDetails.get(API_VERSION);
String apiProvider = (String) apiDetails.get(API_PROVIDER);
String environmentName = (String) environmentDetails.get(ENVIRONMENT_NAME);
String environmentType = (String) environmentDetails.get(ENVIRONMENT_TYPE);
if (log.isDebugEnabled()) {
log.debug("Published SOAP api gateway environment name: " + environmentName + " environment type: "
+ environmentType);
}
byte[] updatedWSDLContent = this.getUpdatedWSDLByEnvironment(resourceUrl,
arrayOutputStream.toByteArray(), environmentName, environmentType, apiName, apiVersion, apiProvider);
wsdlContent = new String(updatedWSDLContent);
} catch (IOException e) {
handleException("Error occurred while copying wsdl content into byte array stream for resource: "
+ resourceUrl, e);
}
} else {
handleException("No wsdl resource found for resource path: " + resourceUrl);
}
JSONObject data = new JSONObject();
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME,
docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME));
data.put(APIConstants.DOCUMENTATION_RESOURCE_MAP_DATA, wsdlContent);
if (log.isDebugEnabled()) {
log.debug("Updated wsdl content details for wsdl resource: " + docResourceMap.get("name") + " is " +
data.toJSONString());
}
return data.toJSONString();
}
/**
* To check authorization of the API against the currently logged-in user. If the user is not authorized, an
* exception will be thrown.
*
* @param identifier API identifier
* @throws APIManagementException APIManagementException
*/
protected void checkAccessControlPermission(APIIdentifier identifier) throws APIManagementException {
if (identifier == null || !isAccessControlRestrictionEnabled) {
if (!isAccessControlRestrictionEnabled && log.isDebugEnabled() && identifier != null) {
log.debug(
"Publisher access control restriction is not enabled. Hence the API " + identifier.getApiName()
+ " should not be checked for further permission. Registry permission check "
+ "is sufficient");
}
return;
}
String apiPath = APIUtil.getAPIPath(identifier);
Registry registry;
try {
// Need user name with tenant domain to get correct domain name from
// MultitenantUtils.getTenantDomain(username)
String userNameWithTenantDomain = (userNameWithoutChange != null) ? userNameWithoutChange : username;
String apiTenantDomain = getTenantDomain(identifier);
int apiTenantId = getTenantManager().getTenantId(apiTenantDomain);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(apiTenantDomain)) {
APIUtil.loadTenantRegistry(apiTenantId);
}
if (this.tenantDomain == null || !this.tenantDomain.equals(apiTenantDomain)) { //cross tenant scenario
registry = getRegistryService().getGovernanceUserRegistry(
getTenantAwareUsername(APIUtil.replaceEmailDomainBack(identifier.getProviderName())),
apiTenantId);
} else {
registry = this.registry;
}
Resource apiResource = registry.get(apiPath);
String accessControlProperty = apiResource.getProperty(APIConstants.ACCESS_CONTROL);
if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty
.equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) {
if (log.isDebugEnabled()) {
log.debug("API in the path " + apiPath + " does not have any access control restriction");
}
return;
}
if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) {
return;
}
String storeVisibilityRoles = apiResource.getProperty(APIConstants.STORE_VIEW_ROLES);
if (storeVisibilityRoles != null && !storeVisibilityRoles.trim().isEmpty()) {
String[] storeVisibilityRoleList = storeVisibilityRoles.split(",");
int index = 0;
for (String role: storeVisibilityRoleList) {
storeVisibilityRoleList[index] = role.trim();
index++;
}
if (log.isDebugEnabled()) {
log.debug("API has restricted access to users with the roles : " + Arrays
.toString(storeVisibilityRoleList));
}
String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain);
if (log.isDebugEnabled()) {
log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
}
for (String role : storeVisibilityRoleList) {
if (role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) || APIUtil
.compareRoleList(userRoleList, role)) {
return;
}
}
if (log.isDebugEnabled()) {
log.debug("API " + identifier + " cannot be accessed by user '" + username + "'. It "
+ "has a store visibility restriction");
}
throw new APIManagementException(
APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view the API " + identifier);
}
} catch (RegistryException e) {
throw new APIManagementException(
"Registry Exception while trying to check the store visibility restriction of API " + identifier
.getApiName(), e);
} catch (org.wso2.carbon.user.api.UserStoreException e) {
String msg = "Failed to get API from : " + apiPath;
log.error(msg, e);
throw new APIManagementException(msg, e);
}
}
/**
* This method is used to get the wsdl updated with the endpoints of the gateway environment in which the api is
* published
*
* @param wsdlResourcePath registry resource path to the wsdl
* @param wsdlContent wsdl resource content as byte array
* @param environmentName gateway environment name
* @param environmentType gateway environment type
* @param apiName name of the api
* @param apiVersion version of the api
* @param apiProvider provider of the api
* @return updated wsdl content with environment endpoints
* @throws APIManagementException
*/
private byte[] getUpdatedWSDLByEnvironment(String wsdlResourcePath, byte[] wsdlContent, String environmentName,
String environmentType, String apiName, String apiVersion, String apiProvider) throws APIManagementException {
APIMWSDLReader apimwsdlReader = new APIMWSDLReader(wsdlResourcePath);
Definition definition = apimwsdlReader.getWSDLDefinitionFromByteContent(wsdlContent, false);
byte[] updatedWSDLContent = null;
boolean isTenantFlowStarted = false;
try {
String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(apiProvider));
if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
isTenantFlowStarted = true;
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
}
RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
int tenantId;
UserRegistry registry;
try {
tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
.getTenantId(tenantDomain);
APIUtil.loadTenantRegistry(tenantId);
registry = registryService.getGovernanceSystemRegistry(tenantId);
API api = null;
if (!StringUtils.isEmpty(apiName) && !StringUtils.isEmpty(apiVersion)) {
APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(apiProvider), apiName, apiVersion);
if (log.isDebugEnabled()) {
log.debug("Api identifier for the soap api artifact: " + apiIdentifier + "for api name: "
+ apiName + ", version: " + apiVersion);
}
GenericArtifact apiArtifact = APIUtil.getAPIArtifact(apiIdentifier, registry);
api = APIUtil.getAPI(apiArtifact);
if (log.isDebugEnabled()) {
if (api != null) {
log.debug(
"Api context for the artifact with id:" + api.getId() + " is " + api.getContext());
} else {
log.debug("Api does not exist for api name: " + apiIdentifier.getApiName());
}
}
} else {
handleException("Artifact does not exist in the registry for api name: " + apiName +
" and version: " + apiVersion);
}
if (api != null) {
try {
apimwsdlReader.setServiceDefinition(definition, api, environmentName, environmentType);
if (log.isDebugEnabled()) {
log.debug("Soap api with context:" + api.getContext() + " in " + environmentName
+ " with environment type" + environmentType);
}
updatedWSDLContent = apimwsdlReader.getWSDL(definition);
} catch (APIManagementException e) {
handleException("Error occurred while processing the wsdl for api: " + api.getId());
}
} else {
handleException("Error while getting API object for wsdl artifact");
}
} catch (UserStoreException e) {
handleException("Error while reading tenant information", e);
} catch (RegistryException e) {
handleException("Error when create registry instance", e);
}
} finally {
if (isTenantFlowStarted) {
PrivilegedCarbonContext.endTenantFlow();
}
}
return updatedWSDLContent;
}
/**
* This method is used to get keys of custom attributes, configured by user
*
* @param userId user name of logged in user
* @return Array of JSONObject, contains keys of attributes
* @throws APIManagementException
*/
public JSONArray getAppAttributesFromConfig(String userId) throws APIManagementException {
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
int tenantId = 0;
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
JSONArray applicationAttributes = null;
JSONObject applicationConfig = APIUtil.getAppAttributeKeysFromRegistry(tenantId);
try {
if (applicationConfig != null) {
applicationAttributes = (JSONArray) applicationConfig.get(APIConstants.ApplicationAttributes.ATTRIBUTES);
} else {
APIManagerConfiguration configuration = getAPIManagerConfiguration();
applicationAttributes = configuration.getApplicationAttributes();
}
} catch (NullPointerException e){
handleException("Error in reading configuration " + e.getMessage(), e);
}
return applicationAttributes;
}
/**
* This method is used to validate keys of custom attributes, configured by user
*
* @param application Application whose custom attributes are validated.
* @param userId user name of logged in user
* @throws APIManagementException
*/
public void checkAppAttributes(Application application, String userId) throws APIManagementException {
JSONArray applicationAttributesFromConfig = getAppAttributesFromConfig(userId);
Map<String, String> applicationAttributes = application.getApplicationAttributes();
List attributeKeys = new ArrayList<String>();
int applicationId = application.getId();
int tenantId = 0;
Map<String, String> newApplicationAttributes = new HashMap<>();
String tenantDomain = MultitenantUtils.getTenantDomain(userId);
try {
tenantId = getTenantId(tenantDomain);
} catch (UserStoreException e) {
handleException("Error in getting tenantId of " + tenantDomain, e);
}
for (Object object : applicationAttributesFromConfig) {
JSONObject attribute = (JSONObject) object;
attributeKeys.add(attribute.get(APIConstants.ApplicationAttributes.ATTRIBUTE));
}
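// Drop stored application attributes whose keys are no longer present in the configured attribute list.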
for (Object key : applicationAttributes.keySet()) {
if (!attributeKeys.contains(key)) {
apiMgtDAO.deleteApplicationAttributes((String) key, applicationId);
if (log.isDebugEnabled()) {
log.debug("Removing " + key + "from application - " + application.getName());
}
}
}
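// Add any newly configured attribute keys with empty values so every configured key exists for the application.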
for (Object key : attributeKeys) {
if (!applicationAttributes.keySet().contains(key)) {
newApplicationAttributes.put((String) key, "");
}
}
apiMgtDAO.addApplicationAttributes(newApplicationAttributes, applicationId, tenantId);
}
}
| Removed the duplicated code
| components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIConsumerImpl.java | Removed the duplicated code | <ide><path>omponents/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIConsumerImpl.java
<ide> String storeVisibilityRoles = apiResource.getProperty(APIConstants.STORE_VIEW_ROLES);
<ide> if (storeVisibilityRoles != null && !storeVisibilityRoles.trim().isEmpty()) {
<ide> String[] storeVisibilityRoleList = storeVisibilityRoles.split(",");
<del> int index = 0;
<del> for (String role: storeVisibilityRoleList) {
<del> storeVisibilityRoleList[index] = role.trim();
<del> index++;
<del> }
<ide> if (log.isDebugEnabled()) {
<ide> log.debug("API has restricted access to users with the roles : " + Arrays
<ide> .toString(storeVisibilityRoleList));
<ide> log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
<ide> }
<ide> for (String role : storeVisibilityRoleList) {
<add> role = role.trim();
<ide> if (role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) || APIUtil
<ide> .compareRoleList(userRoleList, role)) {
<ide> return; |
|
Java | apache-2.0 | error: pathspec 'webappender/src/test/java/com/clescot/webappender/filter/ThresholdFilterBuilderTest.java' did not match any file(s) known to git
| 2c6bef6e78e370450b935ae3fbaad819a356536b | 1 | clescot/webappender | package com.clescot.webappender.filter;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.filter.ThresholdFilter;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.LoggingEvent;
import ch.qos.logback.core.filter.Filter;
import ch.qos.logback.core.spi.FilterReply;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import static org.fest.assertions.Assertions.assertThat;
@RunWith(Enclosed.class)
public class ThresholdFilterBuilderTest {
public static class BuildFilters {
private static Logger LOGGER = (Logger) LoggerFactory.getLogger(BuildFilters.class);
@Test
public void test_with_empty_values() throws Exception {
//given
ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
HashMap<String, List<String>> headers = Maps.newHashMap();
List<String> values = Lists.newArrayList();
headers.put(ThresholdFilterBuilder.X_THRESHOLD_FILTER, values);
//when
List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(headers);
//then
assertThat(filters).isEmpty();
}
@Test
public void test_empty_headers() throws Exception {
//given
ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
//when
List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(Maps.<String, List<String>>newHashMap());
//then
assertThat(filters).isEmpty();
}
@Test
public void test_neutral() throws Exception {
//given
ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
HashMap<String, List<String>> headers = Maps.newHashMap();
List<String> values = Lists.newArrayList();
values.add("INFO");
headers.put(ThresholdFilterBuilder.X_THRESHOLD_FILTER, values);
//when
List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(headers);
//then
assertThat(filters).isNotEmpty();
assertThat(filters).hasSize(1);
Filter<ILoggingEvent> filter = filters.get(0);
assertThat(filter).isInstanceOf(ThresholdFilter.class);
ThresholdFilter thresholdFilter = (ThresholdFilter) filter;
ILoggingEvent event = new LoggingEvent("", LOGGER, Level.INFO, "message", null, null);
assertThat(thresholdFilter.decide(event)).isEqualTo(FilterReply.NEUTRAL);
}
@Test
public void test_deny() throws Exception {
//given
ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
HashMap<String, List<String>> headers = Maps.newHashMap();
List<String> values = Lists.newArrayList();
values.add("WARN");
headers.put(ThresholdFilterBuilder.X_THRESHOLD_FILTER, values);
//when
List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(headers);
//then
assertThat(filters).isNotEmpty();
assertThat(filters).hasSize(1);
Filter<ILoggingEvent> filter = filters.get(0);
assertThat(filter).isInstanceOf(ThresholdFilter.class);
ThresholdFilter thresholdFilter = (ThresholdFilter) filter;
ILoggingEvent event = new LoggingEvent("", LOGGER, Level.INFO, "message", null, null);
assertThat(thresholdFilter.decide(event)).isEqualTo(FilterReply.DENY);
}
}
}
| webappender/src/test/java/com/clescot/webappender/filter/ThresholdFilterBuilderTest.java | add unit test
| webappender/src/test/java/com/clescot/webappender/filter/ThresholdFilterBuilderTest.java | add unit test | <ide><path>ebappender/src/test/java/com/clescot/webappender/filter/ThresholdFilterBuilderTest.java
<add>package com.clescot.webappender.filter;
<add>
<add>
<add>import ch.qos.logback.classic.Level;
<add>import ch.qos.logback.classic.Logger;
<add>import ch.qos.logback.classic.filter.ThresholdFilter;
<add>import ch.qos.logback.classic.spi.ILoggingEvent;
<add>import ch.qos.logback.classic.spi.LoggingEvent;
<add>import ch.qos.logback.core.filter.Filter;
<add>import ch.qos.logback.core.spi.FilterReply;
<add>import com.google.common.collect.Lists;
<add>import com.google.common.collect.Maps;
<add>import org.junit.Test;
<add>import org.junit.experimental.runners.Enclosed;
<add>import org.junit.runner.RunWith;
<add>import org.slf4j.LoggerFactory;
<add>
<add>import java.util.HashMap;
<add>import java.util.List;
<add>
<add>import static org.fest.assertions.Assertions.assertThat;
<add>
<add>@RunWith(Enclosed.class)
<add>public class ThresholdFilterBuilderTest {
<add>
<add> public static class BuildFilters {
<add>
<add> private static Logger LOGGER = (Logger) LoggerFactory.getLogger(BuildFilters.class);
<add>
<add> @Test
<add> public void test_with_empty_values() throws Exception {
<add> //given
<add> ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
<add> HashMap<String, List<String>> headers = Maps.newHashMap();
<add> List<String> values = Lists.newArrayList();
<add> headers.put(ThresholdFilterBuilder.X_THRESHOLD_FILTER, values);
<add> //when
<add> List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(headers);
<add>
<add> //then
<add> assertThat(filters).isEmpty();
<add> }
<add>
<add>
<add> @Test
<add> public void test_empty_headers() throws Exception {
<add> //given
<add> ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
<add>
<add> //when
<add> List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(Maps.<String, List<String>>newHashMap());
<add>
<add> //then
<add> assertThat(filters).isEmpty();
<add>
<add> }
<add>
<add> @Test
<add> public void test_neutral() throws Exception {
<add> //given
<add> ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
<add> HashMap<String, List<String>> headers = Maps.newHashMap();
<add> List<String> values = Lists.newArrayList();
<add> values.add("INFO");
<add> headers.put(ThresholdFilterBuilder.X_THRESHOLD_FILTER, values);
<add> //when
<add> List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(headers);
<add>
<add> //then
<add> assertThat(filters).isNotEmpty();
<add> assertThat(filters).hasSize(1);
<add> Filter<ILoggingEvent> filter = filters.get(0);
<add> assertThat(filter).isInstanceOf(ThresholdFilter.class);
<add> ThresholdFilter thresholdFilter = (ThresholdFilter) filter;
<add> ILoggingEvent event = new LoggingEvent("", LOGGER, Level.INFO, "message", null, null);
<add>
<add> assertThat(thresholdFilter.decide(event)).isEqualTo(FilterReply.NEUTRAL);
<add>
<add> }
<add>
<add> @Test
<add> public void test_deny() throws Exception {
<add> //given
<add> ThresholdFilterBuilder thresholdFilterBuilder = new ThresholdFilterBuilder();
<add> HashMap<String, List<String>> headers = Maps.newHashMap();
<add> List<String> values = Lists.newArrayList();
<add> values.add("WARN");
<add> headers.put(ThresholdFilterBuilder.X_THRESHOLD_FILTER, values);
<add> //when
<add> List<? extends Filter<ILoggingEvent>> filters = thresholdFilterBuilder.buildFilters(headers);
<add>
<add> //then
<add> assertThat(filters).isNotEmpty();
<add> assertThat(filters).hasSize(1);
<add> Filter<ILoggingEvent> filter = filters.get(0);
<add> assertThat(filter).isInstanceOf(ThresholdFilter.class);
<add> ThresholdFilter thresholdFilter = (ThresholdFilter) filter;
<add> ILoggingEvent event = new LoggingEvent("", LOGGER, Level.INFO, "message", null, null);
<add>
<add> assertThat(thresholdFilter.decide(event)).isEqualTo(FilterReply.DENY);
<add>
<add> }
<add> }
<add>} |
|
Java | agpl-3.0 | 7c4624f772669fcbacfe3566b1c88cfcfee5bab5 | 0 | Kunagi/kunagi,Kunagi/kunagi,Kunagi/kunagi,JavierPeris/kunagi,JavierPeris/kunagi,Kunagi/kunagi,JavierPeris/kunagi,Kunagi/kunagi,JavierPeris/kunagi,JavierPeris/kunagi | package scrum.client.common;
import ilarkesto.gwt.client.Gwt;
import java.util.Set;
import scrum.client.admin.User;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
public class UsersOnBlockWidget extends AScrumWidget {
private AScrumGwtEntity entity;
private FlowPanel panel;
public UsersOnBlockWidget(AScrumGwtEntity entity) {
super();
this.entity = entity;
}
@Override
protected Widget onInitialization() {
panel = new FlowPanel();
panel.setStyleName("UsersOnBlockWidget");
return panel;
}
@Override
protected void onUpdate() {
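// Lists the names of the other users currently selecting this entity, comma-separated and coloured per user.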
panel.clear();
Set<User> users = getCurrentProject().getUsersSelecting(entity);
boolean first = true;
for (User user : users) {
if (user == getCurrentUser()) continue;
if (first) {
first = false;
} else {
panel.add(new Label(", "));
}
Label label = Gwt.createInline(user.getName());
label.getElement().getStyle().setProperty("color", user.getProjectConfig().getColor());
panel.add(label);
}
}
}
| src/main/java/scrum/client/common/UsersOnBlockWidget.java | package scrum.client.common;
import java.util.Set;
import scrum.client.admin.User;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
public class UsersOnBlockWidget extends AScrumWidget {
private AScrumGwtEntity entity;
private FlowPanel panel;
public UsersOnBlockWidget(AScrumGwtEntity entity) {
super();
this.entity = entity;
}
@Override
protected Widget onInitialization() {
panel = new FlowPanel();
panel.setStyleName("UsersOnBlockWidget");
return panel;
}
@Override
protected void onUpdate() {
panel.clear();
Set<User> users = getCurrentProject().getUsersSelecting(entity);
boolean first = true;
for (User user : users) {
if (user == getCurrentUser()) continue;
if (first) {
first = false;
} else {
panel.add(new Label(", "));
}
Label label = new Label(user.getName());
label.getElement().getStyle().setProperty("color", user.getProjectConfig().getColor());
panel.add(label);
}
}
}
| bugfix: users on block float
git-svn-id: 5d16f60e415856c0d9146e9340794746a8f5c347@2838 15bc23fa-3125-0410-a1f3-bdc40dbf1900
| src/main/java/scrum/client/common/UsersOnBlockWidget.java | bugfix: users on block float | <ide><path>rc/main/java/scrum/client/common/UsersOnBlockWidget.java
<ide> package scrum.client.common;
<add>
<add>import ilarkesto.gwt.client.Gwt;
<ide>
<ide> import java.util.Set;
<ide>
<ide> panel.add(new Label(", "));
<ide> }
<ide>
<del> Label label = new Label(user.getName());
<add> Label label = Gwt.createInline(user.getName());
<ide> label.getElement().getStyle().setProperty("color", user.getProjectConfig().getColor());
<ide> panel.add(label);
<ide> } |
|
JavaScript | mit | 205b3fe5718fabb9bb2465147a2dc9ff65bbdf68 | 0 | tinymce/tinymce,danielpunkass/tinymce,TeamupCom/tinymce,danielpunkass/tinymce,tinymce/tinymce,gencer/tinymce,FernCreek/tinymce,FernCreek/tinymce,leofeyer/tinymce,leofeyer/tinymce,tinymce/tinymce,TeamupCom/tinymce,FernCreek/tinymce,danielpunkass/tinymce,gencer/tinymce | asynctest(
'Browser Test: ui.ListTest',
[
'ephox.agar.api.GeneralSteps',
'ephox.agar.api.Pipeline',
'ephox.alloy.api.behaviour.Behaviour',
'ephox.alloy.api.behaviour.Replacing',
'ephox.alloy.api.component.GuiFactory',
'ephox.alloy.api.component.Memento',
'ephox.alloy.api.system.Attachment',
'ephox.alloy.api.system.Gui',
'ephox.alloy.test.GuiSetup',
'ephox.katamari.api.Fun',
'ephox.mcagar.api.TinyApis',
'ephox.mcagar.api.TinyLoader',
'ephox.sugar.api.node.Body',
'ephox.sugar.api.search.Traverse',
'tinymce.themes.mobile.features.Features',
'tinymce.themes.mobile.test.theme.TestTheme',
'tinymce.themes.mobile.test.ui.TestUi',
'tinymce.themes.mobile.util.FormatChangers'
],
function (
GeneralSteps, Pipeline, Behaviour, Replacing, GuiFactory, Memento, Attachment, Gui, GuiSetup, Fun, TinyApis, TinyLoader, Body, Traverse, Features, TestTheme,
TestUi, FormatChangers
) {
var success = arguments[arguments.length - 2];
var failure = arguments[arguments.length - 1];
/* This test is going to create a toolbar with both list items on it */
var alloy = Gui.create();
var body = Body.body();
Attachment.attachSystem(body, alloy);
var toolbar = GuiFactory.build({
dom: {
tag: 'div',
classes: [ 'test-toolbar' ]
},
behaviours: Behaviour.derive([
Replacing.config({ })
])
});
var socket = GuiFactory.build({
dom: {
tag: 'div',
classes: [ 'test-socket' ]
}
});
alloy.add(toolbar);
alloy.add(socket);
TestTheme.setup(alloy, socket);
var realm = {
system: Fun.constant(alloy),
socket: Fun.constant(socket)
};
TinyLoader.setup(function (editor, onSuccess, onFailure) {
var features = Features.setup(realm, editor);
FormatChangers.init(realm, editor);
var apis = TinyApis(editor);
var memBullist = Memento.record(
features.bullist.spec()
);
var memNumlist = Memento.record(
features.numlist.spec()
);
Replacing.set(toolbar, [
memBullist.asSpec(),
memNumlist.asSpec()
]);
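// P1, P2 and P3 place the caret inside the <ol>, the plain <p> and the <ul> of the test content respectively.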
var sSetP1 = apis.sSetSelection([ 0, 0, 0 ], 'Thi'.length, [ 0, 0, 0 ], 'Thi'.length);
var sSetP2 = apis.sSetSelection([ 1, 0 ], 'Norma'.length, [ 1, 0 ], 'Norma'.length);
var sSetP3 = apis.sSetSelection([ 2, 0, 0 ], 'Bu'.length, [ 2, 0, 0 ], 'Bu'.length);
var sCheckComponent = function (label, state) {
return function (memento) {
return TestUi.sWaitForToggledState(label, state, realm, memento);
};
};
var sCheckLists = function (situation, stateOfNumlist, stateOfBullist) {
return GeneralSteps.sequence([
sCheckComponent('checking numlist: ' + situation, stateOfNumlist)(memNumlist),
sCheckComponent('checking bullist: ' + situation, stateOfBullist)(memBullist)
]);
};
var sCheckInNumlist = function (situation) {
return sCheckLists(situation, true, false);
};
var sCheckInBullist = function (situation) {
return sCheckLists(situation, false, true);
};
var sCheckInNoList = function (situation) {
return sCheckLists(situation, false, false);
};
var sCheckP1 = function (situation) {
return GeneralSteps.sequence([
sSetP1,
sCheckInNumlist(situation)
]);
};
var sCheckP2 = function (situation) {
return GeneralSteps.sequence([
sSetP2,
sCheckInNoList(situation)
]);
};
var sCheckP3 = function (situation) {
return GeneralSteps.sequence([
sSetP3,
sCheckInBullist(situation)
]);
};
Pipeline.async({}, [
GuiSetup.mAddStyles(Traverse.owner(body), [
'.tinymce-mobile-toolbar-button { padding: 2px; border: 1px solid black; background: white; }',
'.tinymce-mobile-toolbar-button.tinymce-mobile-toolbar-button-selected { background: #cadbee; }',
'.tinymce-mobile-icon-unordered-list:before { content: "ul"; }',
'.tinymce-mobile-icon-ordered-list:before { content: "ol"; }'
]),
apis.sFocus,
apis.sSetContent(
'<ol><li>This is an ordered list</li></ol><p>Normal paragraph</p><ul><li>Bullet list</li></ul>'
),
sCheckP1('initial selection in ol'),
sCheckP2('ol >>> p'),
sCheckP3('p >>> ul'),
sCheckP1('ul >>> ol'),
TestUi.sClickComponent(realm, memBullist),
sCheckInBullist('ol converted to ul'),
TestUi.sClickComponent(realm, memNumlist),
sCheckInNumlist('ul converted back to ol'),
TestUi.sClickComponent(realm, memNumlist),
sCheckInNoList('ol converted to p'),
GuiSetup.mRemoveStyles
], onSuccess, onFailure);
}, {
theme: TestTheme.name()
}, function () {
success();
}, failure);
}
);
| src/themes/mobile/src/test/js/browser/ui/ListTest.js | asynctest(
'Browser Test: ui.ListTest',
[
'ephox.agar.api.Assertions',
'ephox.agar.api.Chain',
'ephox.agar.api.GeneralSteps',
'ephox.agar.api.Mouse',
'ephox.agar.api.Pipeline',
'ephox.agar.api.Step',
'ephox.agar.api.Waiter',
'ephox.alloy.api.behaviour.Behaviour',
'ephox.alloy.api.behaviour.Replacing',
'ephox.alloy.api.behaviour.Toggling',
'ephox.alloy.api.component.GuiFactory',
'ephox.alloy.api.component.Memento',
'ephox.alloy.api.system.Attachment',
'ephox.alloy.api.system.Gui',
'ephox.alloy.test.GuiSetup',
'ephox.katamari.api.Fun',
'ephox.mcagar.api.TinyApis',
'ephox.mcagar.api.TinyLoader',
'ephox.sugar.api.node.Body',
'ephox.sugar.api.properties.Attr',
'ephox.sugar.api.search.Traverse',
'tinymce.themes.mobile.features.Features',
'tinymce.themes.mobile.test.theme.TestTheme',
'tinymce.themes.mobile.util.FormatChangers'
],
function (
Assertions, Chain, GeneralSteps, Mouse, Pipeline, Step, Waiter, Behaviour, Replacing, Toggling, GuiFactory, Memento, Attachment, Gui, GuiSetup, Fun, TinyApis,
TinyLoader, Body, Attr, Traverse, Features, TestTheme, FormatChangers
) {
var success = arguments[arguments.length - 2];
var failure = arguments[arguments.length - 1];
/* This test is going to create a toolbar with both list items on it */
var alloy = Gui.create();
var body = Body.body();
Attachment.attachSystem(body, alloy);
var toolbar = GuiFactory.build({
dom: {
tag: 'div',
classes: [ 'test-toolbar' ]
},
behaviours: Behaviour.derive([
Replacing.config({ })
])
});
var socket = GuiFactory.build({
dom: {
tag: 'div',
classes: [ 'test-socket' ]
}
});
alloy.add(toolbar);
alloy.add(socket);
TestTheme.setup(alloy, socket);
var realm = {
system: Fun.constant(alloy)
};
TinyLoader.setup(function (editor, onSuccess, onFailure) {
var features = Features.setup(realm, editor);
FormatChangers.init(realm, editor);
var apis = TinyApis(editor);
var memBullist = Memento.record(
features.bullist.spec()
);
var memNumlist = Memento.record(
features.numlist.spec()
);
Replacing.set(toolbar, [
memBullist.asSpec(),
memNumlist.asSpec()
]);
var sSetP1 = apis.sSetSelection([ 0, 0, 0 ], 'Thi'.length, [ 0, 0, 0 ], 'Thi'.length);
var sSetP2 = apis.sSetSelection([ 1, 0 ], 'Norma'.length, [ 1, 0 ], 'Norma'.length);
var sSetP3 = apis.sSetSelection([ 2, 0, 0 ], 'Bu'.length, [ 2, 0, 0 ], 'Bu'.length);
var sAssertListIs = function (label, mem, state) {
return Waiter.sTryUntil(
label,
Step.sync(function () {
var button = mem.get(socket);
Assertions.assertEq('Selected/Pressed state of button:\n' +
' (' + Attr.get(button.element(), 'class') + ')', state, Toggling.isOn(button));
}),
100,
1000
);
};
var sCheckInNumlist = function (situation) {
return GeneralSteps.sequence([
sAssertListIs('checking numlist: ' + situation, memNumlist, true),
sAssertListIs('checking bullist: ' + situation, memBullist, false)
]);
};
var sCheckInBullist = function (situation) {
return GeneralSteps.sequence([
sAssertListIs('checking numlist: ' + situation, memNumlist, false),
sAssertListIs('checking bullist: ' + situation, memBullist, true)
]);
};
var sCheckInNoList = function (situation) {
return GeneralSteps.sequence([
sAssertListIs('checking numlist: ' + situation, memNumlist, false),
sAssertListIs('checking bullist: ' + situation, memBullist, false)
]);
};
var sCheckP1 = function (situation) {
return GeneralSteps.sequence([
sSetP1,
sCheckInNumlist(situation)
]);
};
var sCheckP2 = function (situation) {
return GeneralSteps.sequence([
sSetP2,
sCheckInNoList(situation)
]);
};
var sCheckP3 = function (situation) {
return GeneralSteps.sequence([
sSetP3,
sCheckInBullist(situation)
]);
};
var sClickButton = function (mem) {
return Chain.asStep(alloy.element(), [
Chain.mapper(function () {
return mem.get(socket).element();
}),
Mouse.cClick
]);
};
Pipeline.async({}, [
GuiSetup.mAddStyles(Traverse.owner(body), [
'.tinymce-mobile-toolbar-button { padding: 2px; border: 1px solid black; background: white; }',
'.tinymce-mobile-toolbar-button.tinymce-mobile-toolbar-button-selected { background: #cadbee; }',
'.tinymce-mobile-icon-unordered-list:before { content: "ul"; }',
'.tinymce-mobile-icon-ordered-list:before { content: "ol"; }'
]),
apis.sFocus,
apis.sSetContent(
'<ol><li>This is an ordered list</li></ol><p>Normal paragraph</p><ul><li>Bullet list</li></ul>'
),
sCheckP1('initial selection in ol'),
sCheckP2('ol >>> p'),
sCheckP3('p >>> ul'),
sCheckP1('ul >>> ol'),
sClickButton(memBullist),
sCheckInBullist('ol converted to ul'),
sClickButton(memNumlist),
sCheckInNumlist('ul converted back to ol'),
sClickButton(memNumlist),
sCheckInNoList('ol converted to p'),
GuiSetup.mRemoveStyles
], onSuccess, onFailure);
}, {
theme: TestTheme.name()
}, function () {
success();
}, failure);
}
);
| TM-93: using general testing methods
| src/themes/mobile/src/test/js/browser/ui/ListTest.js | TM-93: using general testing methods | <ide><path>rc/themes/mobile/src/test/js/browser/ui/ListTest.js
<ide> 'Browser Test: ui.ListTest',
<ide>
<ide> [
<del> 'ephox.agar.api.Assertions',
<del> 'ephox.agar.api.Chain',
<ide> 'ephox.agar.api.GeneralSteps',
<del> 'ephox.agar.api.Mouse',
<ide> 'ephox.agar.api.Pipeline',
<del> 'ephox.agar.api.Step',
<del> 'ephox.agar.api.Waiter',
<ide> 'ephox.alloy.api.behaviour.Behaviour',
<ide> 'ephox.alloy.api.behaviour.Replacing',
<del> 'ephox.alloy.api.behaviour.Toggling',
<ide> 'ephox.alloy.api.component.GuiFactory',
<ide> 'ephox.alloy.api.component.Memento',
<ide> 'ephox.alloy.api.system.Attachment',
<ide> 'ephox.mcagar.api.TinyApis',
<ide> 'ephox.mcagar.api.TinyLoader',
<ide> 'ephox.sugar.api.node.Body',
<del> 'ephox.sugar.api.properties.Attr',
<ide> 'ephox.sugar.api.search.Traverse',
<ide> 'tinymce.themes.mobile.features.Features',
<ide> 'tinymce.themes.mobile.test.theme.TestTheme',
<add> 'tinymce.themes.mobile.test.ui.TestUi',
<ide> 'tinymce.themes.mobile.util.FormatChangers'
<ide> ],
<ide>
<ide> function (
<del> Assertions, Chain, GeneralSteps, Mouse, Pipeline, Step, Waiter, Behaviour, Replacing, Toggling, GuiFactory, Memento, Attachment, Gui, GuiSetup, Fun, TinyApis,
<del> TinyLoader, Body, Attr, Traverse, Features, TestTheme, FormatChangers
<add> GeneralSteps, Pipeline, Behaviour, Replacing, GuiFactory, Memento, Attachment, Gui, GuiSetup, Fun, TinyApis, TinyLoader, Body, Traverse, Features, TestTheme,
<add> TestUi, FormatChangers
<ide> ) {
<ide> var success = arguments[arguments.length - 2];
<ide> var failure = arguments[arguments.length - 1];
<ide> TestTheme.setup(alloy, socket);
<ide>
<ide> var realm = {
<del> system: Fun.constant(alloy)
<add> system: Fun.constant(alloy),
<add> socket: Fun.constant(socket)
<ide> };
<ide>
<ide> TinyLoader.setup(function (editor, onSuccess, onFailure) {
<ide> var sSetP2 = apis.sSetSelection([ 1, 0 ], 'Norma'.length, [ 1, 0 ], 'Norma'.length);
<ide> var sSetP3 = apis.sSetSelection([ 2, 0, 0 ], 'Bu'.length, [ 2, 0, 0 ], 'Bu'.length);
<ide>
<del> var sAssertListIs = function (label, mem, state) {
<del> return Waiter.sTryUntil(
<del> label,
<del> Step.sync(function () {
<del> var button = mem.get(socket);
<del> Assertions.assertEq('Selected/Pressed state of button:\n' +
<del> ' (' + Attr.get(button.element(), 'class') + ')', state, Toggling.isOn(button));
<del> }),
<del> 100,
<del> 1000
<del> );
<add> var sCheckComponent = function (label, state) {
<add> return function (memento) {
<add> return TestUi.sWaitForToggledState(label, state, realm, memento);
<add> };
<add> };
<add>
<add> var sCheckLists = function (situation, stateOfNumlist, stateOfBullist) {
<add> return GeneralSteps.sequence([
<add> sCheckComponent('checking numlist: ' + situation, stateOfNumlist)(memNumlist),
<add> sCheckComponent('checking bullist: ' + situation, stateOfBullist)(memBullist)
<add> ]);
<ide> };
<ide>
<ide> var sCheckInNumlist = function (situation) {
<del> return GeneralSteps.sequence([
<del> sAssertListIs('checking numlist: ' + situation, memNumlist, true),
<del> sAssertListIs('checking bullist: ' + situation, memBullist, false)
<del> ]);
<add> return sCheckLists(situation, true, false);
<ide> };
<ide>
<ide> var sCheckInBullist = function (situation) {
<del> return GeneralSteps.sequence([
<del> sAssertListIs('checking numlist: ' + situation, memNumlist, false),
<del> sAssertListIs('checking bullist: ' + situation, memBullist, true)
<del> ]);
<add> return sCheckLists(situation, false, true);
<ide> };
<ide>
<ide> var sCheckInNoList = function (situation) {
<del> return GeneralSteps.sequence([
<del> sAssertListIs('checking numlist: ' + situation, memNumlist, false),
<del> sAssertListIs('checking bullist: ' + situation, memBullist, false)
<del> ]);
<add> return sCheckLists(situation, false, false);
<ide> };
<ide>
<ide> var sCheckP1 = function (situation) {
<ide> ]);
<ide> };
<ide>
<del> var sClickButton = function (mem) {
<del> return Chain.asStep(alloy.element(), [
<del> Chain.mapper(function () {
<del> return mem.get(socket).element();
<del> }),
<del> Mouse.cClick
<del> ]);
<del> };
<del>
<ide> Pipeline.async({}, [
<ide> GuiSetup.mAddStyles(Traverse.owner(body), [
<ide> '.tinymce-mobile-toolbar-button { padding: 2px; border: 1px solid black; background: white; }',
<ide> sCheckP3('p >>> ul'),
<ide> sCheckP1('ul >>> ol'),
<ide>
<del> sClickButton(memBullist),
<add> TestUi.sClickComponent(realm, memBullist),
<ide> sCheckInBullist('ol converted to ul'),
<del> sClickButton(memNumlist),
<add> TestUi.sClickComponent(realm, memNumlist),
<ide> sCheckInNumlist('ul converted back to ol'),
<del> sClickButton(memNumlist),
<add> TestUi.sClickComponent(realm, memNumlist),
<ide> sCheckInNoList('ol converted to p'),
<ide> GuiSetup.mRemoveStyles
<ide> ], onSuccess, onFailure); |
|
JavaScript | mit | 992f7302a94035d7e8178f3eeb857427eec24de9 | 0 | pratikju/go-chat | $(document).ready(function(){
var name_color_map = {};
var jsonObj = JSON.parse($('#data').text());
socket_addr = 'ws://'+ jsonObj.context +'/websocket';
var websocket = new WebSocket(socket_addr);
console.log("Websocket - status: " + websocket.readyState);
websocket.onopen = function(res) {
console.log("CONNECTION opened..." + this.readyState);
websocket.send(jsonObj.name + ' joined the chat.');
}
websocket.onmessage = function(res) {
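// Chat messages arrive as "senderName~~messageText" (see the #send click handler below); anything that does
// not match this pattern is treated as a system notification such as a join/leave message.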
var regEx = /(.*)~~(.*)$/;
var dataArray = regEx.exec(res.data.replace(/\n/g,'<br/>'));
var div_id = "div" + getRandomIntInclusive(0,50000);
var random_color = 'rgb(' + (Math.floor(Math.random() * 150)) + ',' + (Math.floor(Math.random() * 150)) + ',' + (Math.floor(Math.random() * 150)) + ')';
if(dataArray != null){
if(dataArray[1] != jsonObj.name){
$('#chat_box').append('<div id="{id}" class="messages pull-left">'.interpolate({id: div_id}));
$('#{id}'.interpolate({id: div_id})).append('<p style="color:{color}"><strong>{content}</strong></p>'.interpolate({color: find_suitable_color(dataArray, random_color), content: dataArray[1]}));
$('#notify')[0].play();
}else{
$('#chat_box').append('<div id="{id}" class="mymessages pull-right">'.interpolate({id: div_id}));
}
if(checkForLink(dataArray[2])){
$('#{id}'.interpolate({id: div_id})).append('<a href="{link}" target="_blank">{link}</a>'.interpolate({link: dataArray[2]}));
}else{
$('#{id}'.interpolate({id: div_id})).append('<p>{content}</p>'.interpolate({content: dataArray[2]}));
}
$('#chat_box').append('</div><hr/>');
}else{
$('#chat_box').append('<p style="text-align:center;font-weight:bold">{content}</p><br/>'.interpolate({content: res.data}));
$('#notify')[0].play();
}
$('#chat_box').animate({scrollTop: $('#chat_box').prop("scrollHeight")},'fast');
}
websocket.onerror = function(res) {
console.log("Error occured sending..." + m.data);
}
websocket.onclose = function(res) {
console.log("Disconnected - status " + this.readyState);
}
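// Small template helper: replaces {key} placeholders in the string with the matching properties of o.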
String.prototype.interpolate = function (o) {
return this.replace(/{([^{}]*)}/g,
function (a, b) {
var r = o[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
};
var getRandomIntInclusive = function (min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
var checkForLink = function(text){
var regEx = /(http|https|ftp|ftps)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/\S*)?/ ;
if(regEx.test(text)){
return true;
}
return false;
}
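// Assigns each sender a colour the first time they appear and reuses it for their later messages.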
var find_suitable_color = function(dataArray, random_color){
if(name_color_map[dataArray[1]] === undefined){
name_color_map[dataArray[1]] = random_color;
}
return name_color_map[dataArray[1]];
}
$('#chat_prompt').val('');
$('#send').on('click',function(){
if($('#chat_prompt').val().trim() === ""){
return false;
}
websocket.send(jsonObj.name + '~~' +$('#chat_prompt').val())
$('#chat_prompt').val('');
});
$('#clear_chat').on('click',function(){
$('#chat_box').html('');
});
$('#leave_chat').on('click',function(){
$('#chat_box').append('<p style="text-align:center;font-weight:bold">you left the chat.</p><br/>');
websocket.send(jsonObj.name + ' left the chat.');
websocket.close();
});
$('#chat_prompt').keypress(function (e) {
var key = e.which;
if(key == 13){
$('#send').trigger('click');
return false;
}
});
});
| javascripts/chat.js | $(document).ready(function(){
var jsonObj = JSON.parse($('#data').text());
socket_addr = 'ws://'+ jsonObj.context +'/websocket';
var websocket = new WebSocket(socket_addr);
console.log("Websocket - status: " + websocket.readyState);
websocket.onopen = function(res) {
console.log("CONNECTION opened..." + this.readyState);
websocket.send(jsonObj.name + ' joined the chat.');
}
websocket.onmessage = function(res) {
var regEx = /(.*)~~(.*)$/;
var dataArray = regEx.exec(res.data.replace(/\n/g,'<br/>'));
var div_id = "div" + getRandomIntInclusive(0,50000);
var random_color = 'rgb(' + (Math.floor(Math.random() * 256)) + ',' + (Math.floor(Math.random() * 256)) + ',' + (Math.floor(Math.random() * 256)) + ')';
if(dataArray != null){
if(dataArray[1] != jsonObj.name){
$('#chat_box').append('<div id="{id}" class="messages pull-left">'.interpolate({id: div_id}));
$('#{id}'.interpolate({id: div_id})).append('<p style="color:{color}"><strong>{content}</strong></p>'.interpolate({color: random_color, content: dataArray[1]}));
$('#notify')[0].play();
}else{
$('#chat_box').append('<div id="{id}" class="mymessages pull-right">'.interpolate({id: div_id}));
}
if(checkForLink(dataArray[2])){
$('#{id}'.interpolate({id: div_id})).append('<a href="{link}" target="_blank">{link}</a>'.interpolate({link: dataArray[2]}));
}else{
$('#{id}'.interpolate({id: div_id})).append('<p>{content}</p>'.interpolate({content: dataArray[2]}));
}
$('#chat_box').append('</div><hr/>');
}else{
$('#chat_box').append('<p style="text-align:center;font-weight:bold">{content}</p><br/>'.interpolate({content: res.data}));
$('#notify')[0].play();
}
$('#chat_box').animate({scrollTop: $('#chat_box').prop("scrollHeight")},'fast');
}
String.prototype.interpolate = function (o) {
return this.replace(/{([^{}]*)}/g,
function (a, b) {
var r = o[b];
return typeof r === 'string' || typeof r === 'number' ? r : a;
});
};
websocket.onerror = function(res) {
console.log("Error occured sending..." + m.data);
}
websocket.onclose = function(res) {
console.log("Disconnected - status " + this.readyState);
}
var getRandomIntInclusive = function (min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
var checkForLink = function(text){
var regEx = /(http|https|ftp|ftps)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/\S*)?/ ;
if(regEx.test(text)){
return true;
}
return false;
}
$('#chat_prompt').val('');
$('#send').on('click',function(){
if($('#chat_prompt').val().trim() === ""){
return false;
}
websocket.send(jsonObj.name + '~~' +$('#chat_prompt').val())
$('#chat_prompt').val('');
});
$('#clear_chat').on('click',function(){
$('#chat_box').html('');
});
$('#leave_chat').on('click',function(){
$('#chat_box').append('<p style="text-align:center;font-weight:bold">you left the chat.</p><br/>');
websocket.send(jsonObj.name + ' left the chat.');
websocket.close();
});
$('#chat_prompt').keypress(function (e) {
var key = e.which;
if(key == 13){
$('#send').trigger('click');
return false;
}
});
});
| color code added for names
| javascripts/chat.js | color code added for names | <ide><path>avascripts/chat.js
<ide> $(document).ready(function(){
<add> var name_color_map = {};
<ide> var jsonObj = JSON.parse($('#data').text());
<ide> socket_addr = 'ws://'+ jsonObj.context +'/websocket';
<ide> var websocket = new WebSocket(socket_addr);
<ide> var regEx = /(.*)~~(.*)$/;
<ide> var dataArray = regEx.exec(res.data.replace(/\n/g,'<br/>'));
<ide> var div_id = "div" + getRandomIntInclusive(0,50000);
<del> var random_color = 'rgb(' + (Math.floor(Math.random() * 256)) + ',' + (Math.floor(Math.random() * 256)) + ',' + (Math.floor(Math.random() * 256)) + ')';
<add> var random_color = 'rgb(' + (Math.floor(Math.random() * 150)) + ',' + (Math.floor(Math.random() * 150)) + ',' + (Math.floor(Math.random() * 150)) + ')';
<ide> if(dataArray != null){
<ide> if(dataArray[1] != jsonObj.name){
<ide> $('#chat_box').append('<div id="{id}" class="messages pull-left">'.interpolate({id: div_id}));
<del> $('#{id}'.interpolate({id: div_id})).append('<p style="color:{color}"><strong>{content}</strong></p>'.interpolate({color: random_color, content: dataArray[1]}));
<add> $('#{id}'.interpolate({id: div_id})).append('<p style="color:{color}"><strong>{content}</strong></p>'.interpolate({color: find_suitable_color(dataArray, random_color), content: dataArray[1]}));
<ide> $('#notify')[0].play();
<ide> }else{
<ide> $('#chat_box').append('<div id="{id}" class="mymessages pull-right">'.interpolate({id: div_id}));
<ide> $('#chat_box').animate({scrollTop: $('#chat_box').prop("scrollHeight")},'fast');
<ide> }
<ide>
<add> websocket.onerror = function(res) {
<add> console.log("Error occured sending..." + m.data);
<add> }
<add> websocket.onclose = function(res) {
<add> console.log("Disconnected - status " + this.readyState);
<add> }
<add>
<ide> String.prototype.interpolate = function (o) {
<ide> return this.replace(/{([^{}]*)}/g,
<ide> function (a, b) {
<ide> return typeof r === 'string' || typeof r === 'number' ? r : a;
<ide> });
<ide> };
<del>
<del> websocket.onerror = function(res) {
<del> console.log("Error occured sending..." + m.data);
<del> }
<del> websocket.onclose = function(res) {
<del> console.log("Disconnected - status " + this.readyState);
<del> }
<ide>
<ide> var getRandomIntInclusive = function (min, max) {
<ide> return Math.floor(Math.random() * (max - min + 1)) + min;
<ide> return true;
<ide> }
<ide> return false;
<add> }
<add>
<add> var find_suitable_color = function(dataArray, random_color){
<add> if(name_color_map[dataArray[1]] === undefined){
<add> name_color_map[dataArray[1]] = random_color;
<add> }
<add> return name_color_map[dataArray[1]];
<ide> }
<ide>
<ide> $('#chat_prompt').val(''); |
|
Java | lgpl-2.1 | 13a8299cf46cb958312794bf2660a2088f89c7a5 | 0 | lilicoding/soot-infoflow,xph906/FlowDroidInfoflowNew,wsnavely/soot-infoflow-android,kaunder/soot-infoflow,jgarci40/soot-infoflow,secure-software-engineering/soot-infoflow,johspaeth/soot-infoflow,matedealer/soot-infoflow,wangxiayang/soot-infoflow | /*******************************************************************************
* Copyright (c) 2012 Secure Software Engineering Group at EC SPRIDE.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors: Christian Fritz, Steven Arzt, Siegfried Rasthofer, Eric
* Bodden, and others.
******************************************************************************/
package soot.jimple.infoflow.solver;
import heros.solver.IDESolver;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import soot.Scene;
import soot.SootField;
import soot.SootMethod;
import soot.Unit;
import soot.Value;
import soot.jimple.AssignStmt;
import soot.jimple.StaticFieldRef;
import soot.jimple.Stmt;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG;
import soot.jimple.toolkits.ide.icfg.JimpleBasedInterproceduralCFG;
import soot.toolkits.graph.DirectedGraph;
import soot.toolkits.graph.MHGPostDominatorsFinder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
/**
* Interprocedural control-flow graph for the infoflow solver
*
* @author Steven Arzt
* @author Eric Bodden
*/
public class InfoflowCFG implements IInfoflowCFG {
private static enum StaticFieldUse {
Unknown,
Unused,
Read,
Write,
ReadWrite
}
protected final Map<SootMethod, Map<SootField, StaticFieldUse>> staticFieldUses =
new ConcurrentHashMap<SootMethod, Map<SootField,StaticFieldUse>>();
protected final BiDiInterproceduralCFG<Unit, SootMethod> delegate;
protected final LoadingCache<Unit,UnitContainer> unitToPostdominator =
IDESolver.DEFAULT_CACHE_BUILDER.build( new CacheLoader<Unit,UnitContainer>() {
@Override
public UnitContainer load(Unit unit) throws Exception {
SootMethod method = getMethodOf(unit);
DirectedGraph<Unit> graph = delegate.getOrCreateUnitGraph(method);
MHGPostDominatorsFinder<Unit> postdominatorFinder = new MHGPostDominatorsFinder<Unit>(graph);
Unit postdom = postdominatorFinder.getImmediateDominator(unit);
if (postdom == null)
return new UnitContainer(method);
else
return new UnitContainer(postdom);
}
});
public InfoflowCFG() {
this(new JimpleBasedInterproceduralCFG());
}
public InfoflowCFG(BiDiInterproceduralCFG<Unit,SootMethod> delegate) {
this.delegate = delegate;
}
@Override
public UnitContainer getPostdominatorOf(Unit u) {
return unitToPostdominator.getUnchecked(u);
}
//delegate methods follow
@Override
public SootMethod getMethodOf(Unit u) {
return delegate.getMethodOf(u);
}
@Override
public List<Unit> getSuccsOf(Unit u) {
return delegate.getSuccsOf(u);
}
@Override
public boolean isExitStmt(Unit u) {
return delegate.isExitStmt(u);
}
@Override
public boolean isStartPoint(Unit u) {
return delegate.isStartPoint(u);
}
@Override
public boolean isFallThroughSuccessor(Unit u, Unit succ) {
return delegate.isFallThroughSuccessor(u, succ);
}
@Override
public boolean isBranchTarget(Unit u, Unit succ) {
return delegate.isBranchTarget(u, succ);
}
@Override
public Collection<Unit> getStartPointsOf(SootMethod m) {
return delegate.getStartPointsOf(m);
}
@Override
public boolean isCallStmt(Unit u) {
return delegate.isCallStmt(u);
}
@Override
public Set<Unit> allNonCallStartNodes() {
return delegate.allNonCallStartNodes();
}
@Override
public Collection<SootMethod> getCalleesOfCallAt(Unit u) {
return delegate.getCalleesOfCallAt(u);
}
@Override
public Collection<Unit> getCallersOf(SootMethod m) {
return delegate.getCallersOf(m);
}
@Override
public Collection<Unit> getReturnSitesOfCallAt(Unit u) {
return delegate.getReturnSitesOfCallAt(u);
}
@Override
public Set<Unit> getCallsFromWithin(SootMethod m) {
return delegate.getCallsFromWithin(m);
}
@Override
public List<Unit> getPredsOf(Unit u) {
return delegate.getPredsOf(u);
}
@Override
public Collection<Unit> getEndPointsOf(SootMethod m) {
return delegate.getEndPointsOf(m);
}
@Override
public List<Unit> getPredsOfCallAt(Unit u) {
return delegate.getPredsOf(u);
}
@Override
public Set<Unit> allNonCallEndNodes() {
return delegate.allNonCallEndNodes();
}
@Override
public DirectedGraph<Unit> getOrCreateUnitGraph(SootMethod m) {
return delegate.getOrCreateUnitGraph(m);
}
@Override
public List<Value> getParameterRefs(SootMethod m) {
return delegate.getParameterRefs(m);
}
@Override
public boolean isReturnSite(Unit n) {
return delegate.isReturnSite(n);
}
@Override
public boolean isStaticFieldRead(SootMethod method, SootField variable) {
return isStaticFieldUsed(method, variable, new HashSet<SootMethod>(), true);
}
@Override
public boolean isStaticFieldUsed(SootMethod method, SootField variable) {
return isStaticFieldUsed(method, variable, new HashSet<SootMethod>(), false);
}
private boolean isStaticFieldUsed(SootMethod method, SootField variable,
Set<SootMethod> runList, boolean readOnly) {
// Without a body, we cannot say much
if (!method.hasActiveBody())
return false;
// Do not process the same method twice
if (!runList.add(method))
return false;
// Do we already have an entry?
Map<SootField, StaticFieldUse> entry = staticFieldUses.get(method);
if (entry != null) {
StaticFieldUse b = entry.get(variable);
if (b != null && b != StaticFieldUse.Unknown) {
if (readOnly)
return b == StaticFieldUse.Read || b == StaticFieldUse.ReadWrite;
else
return b != StaticFieldUse.Unused;
}
}
// Scan for references to this variable
for (Unit u : method.getActiveBody().getUnits()) {
if (u instanceof AssignStmt) {
AssignStmt assign = (AssignStmt) u;
if (assign.getLeftOp() instanceof StaticFieldRef) {
SootField sf = ((StaticFieldRef) assign.getLeftOp()).getField();
registerStaticVariableUse(method, sf, StaticFieldUse.Write);
				if (!readOnly && variable.equals(sf))
return true;
}
if (assign.getRightOp() instanceof StaticFieldRef) {
SootField sf = ((StaticFieldRef) assign.getRightOp()).getField();
registerStaticVariableUse(method, sf, StaticFieldUse.Read);
				if (variable.equals(sf))
return true;
}
}
if (((Stmt) u).containsInvokeExpr())
for (Iterator<Edge> edgeIt = Scene.v().getCallGraph().edgesOutOf(u); edgeIt.hasNext(); ) {
Edge e = edgeIt.next();
if (isStaticFieldUsed(e.getTgt().method(), variable, runList, readOnly))
return true;
}
}
// Variable is not read
registerStaticVariableUse(method, variable, StaticFieldUse.Unused);
return false;
}
private void registerStaticVariableUse(SootMethod method,
SootField variable, StaticFieldUse fieldUse) {
Map<SootField, StaticFieldUse> entry = staticFieldUses.get(method);
StaticFieldUse oldUse;
synchronized (staticFieldUses) {
if (entry == null) {
entry = new ConcurrentHashMap<SootField, StaticFieldUse>();
staticFieldUses.put(method, entry);
entry.put(variable, fieldUse);
return;
}
oldUse = entry.get(variable);
if (oldUse == null) {
entry.put(variable, fieldUse);
return;
}
}
// This part is monotonic, so no need for synchronization
StaticFieldUse newUse;
switch (oldUse) {
case Unknown :
case Unused :
case ReadWrite :
newUse = fieldUse;
break;
case Read :
newUse = (fieldUse == StaticFieldUse.Read) ? oldUse : StaticFieldUse.ReadWrite;
break;
case Write :
newUse = (fieldUse == StaticFieldUse.Write) ? oldUse : StaticFieldUse.ReadWrite;
break;
default:
throw new RuntimeException("Invalid field use");
}
entry.put(variable, newUse);
}
}
| src/soot/jimple/infoflow/solver/InfoflowCFG.java | /*******************************************************************************
* Copyright (c) 2012 Secure Software Engineering Group at EC SPRIDE.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors: Christian Fritz, Steven Arzt, Siegfried Rasthofer, Eric
* Bodden, and others.
******************************************************************************/
package soot.jimple.infoflow.solver;
import gnu.trove.set.hash.THashSet;
import heros.solver.IDESolver;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import soot.Scene;
import soot.SootField;
import soot.SootMethod;
import soot.Unit;
import soot.Value;
import soot.jimple.AssignStmt;
import soot.jimple.StaticFieldRef;
import soot.jimple.Stmt;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG;
import soot.jimple.toolkits.ide.icfg.JimpleBasedInterproceduralCFG;
import soot.toolkits.graph.DirectedGraph;
import soot.toolkits.graph.MHGPostDominatorsFinder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
/**
* Interprocedural control-flow graph for the infoflow solver
*
* @author Steven Arzt
* @author Eric Bodden
*/
public class InfoflowCFG implements IInfoflowCFG {
private static enum StaticFieldUse {
Unknown,
Unused,
Read,
Write,
ReadWrite
}
protected final Map<SootMethod, Map<SootField, StaticFieldUse>> staticFieldUses =
new ConcurrentHashMap<SootMethod, Map<SootField,StaticFieldUse>>();
protected final BiDiInterproceduralCFG<Unit, SootMethod> delegate;
protected final LoadingCache<Unit,UnitContainer> unitToPostdominator =
IDESolver.DEFAULT_CACHE_BUILDER.build( new CacheLoader<Unit,UnitContainer>() {
@Override
public UnitContainer load(Unit unit) throws Exception {
SootMethod method = getMethodOf(unit);
DirectedGraph<Unit> graph = delegate.getOrCreateUnitGraph(method);
MHGPostDominatorsFinder<Unit> postdominatorFinder = new MHGPostDominatorsFinder<Unit>(graph);
Unit postdom = postdominatorFinder.getImmediateDominator(unit);
if (postdom == null)
return new UnitContainer(method);
else
return new UnitContainer(postdom);
}
});
public InfoflowCFG() {
this(new JimpleBasedInterproceduralCFG());
}
public InfoflowCFG(BiDiInterproceduralCFG<Unit,SootMethod> delegate) {
this.delegate = delegate;
}
@Override
public UnitContainer getPostdominatorOf(Unit u) {
return unitToPostdominator.getUnchecked(u);
}
//delegate methods follow
@Override
public SootMethod getMethodOf(Unit u) {
return delegate.getMethodOf(u);
}
@Override
public List<Unit> getSuccsOf(Unit u) {
return delegate.getSuccsOf(u);
}
@Override
public boolean isExitStmt(Unit u) {
return delegate.isExitStmt(u);
}
@Override
public boolean isStartPoint(Unit u) {
return delegate.isStartPoint(u);
}
@Override
public boolean isFallThroughSuccessor(Unit u, Unit succ) {
return delegate.isFallThroughSuccessor(u, succ);
}
@Override
public boolean isBranchTarget(Unit u, Unit succ) {
return delegate.isBranchTarget(u, succ);
}
@Override
public Collection<Unit> getStartPointsOf(SootMethod m) {
return delegate.getStartPointsOf(m);
}
@Override
public boolean isCallStmt(Unit u) {
return delegate.isCallStmt(u);
}
@Override
public Set<Unit> allNonCallStartNodes() {
return delegate.allNonCallStartNodes();
}
@Override
public Collection<SootMethod> getCalleesOfCallAt(Unit u) {
return delegate.getCalleesOfCallAt(u);
}
@Override
public Collection<Unit> getCallersOf(SootMethod m) {
return delegate.getCallersOf(m);
}
@Override
public Collection<Unit> getReturnSitesOfCallAt(Unit u) {
return delegate.getReturnSitesOfCallAt(u);
}
@Override
public Set<Unit> getCallsFromWithin(SootMethod m) {
return delegate.getCallsFromWithin(m);
}
@Override
public List<Unit> getPredsOf(Unit u) {
return delegate.getPredsOf(u);
}
@Override
public Collection<Unit> getEndPointsOf(SootMethod m) {
return delegate.getEndPointsOf(m);
}
@Override
public List<Unit> getPredsOfCallAt(Unit u) {
return delegate.getPredsOf(u);
}
@Override
public Set<Unit> allNonCallEndNodes() {
return delegate.allNonCallEndNodes();
}
@Override
public DirectedGraph<Unit> getOrCreateUnitGraph(SootMethod m) {
return delegate.getOrCreateUnitGraph(m);
}
@Override
public List<Value> getParameterRefs(SootMethod m) {
return delegate.getParameterRefs(m);
}
@Override
public boolean isReturnSite(Unit n) {
return delegate.isReturnSite(n);
}
@Override
public boolean isStaticFieldRead(SootMethod method, SootField variable) {
return isStaticFieldUsed(method, variable, new THashSet<SootMethod>(), true);
}
@Override
public boolean isStaticFieldUsed(SootMethod method, SootField variable) {
return isStaticFieldUsed(method, variable, new THashSet<SootMethod>(), false);
}
private boolean isStaticFieldUsed(SootMethod method, SootField variable,
Set<SootMethod> runList, boolean readOnly) {
// Without a body, we cannot say much
if (!method.hasActiveBody())
return false;
// Do not process the same method twice
if (!runList.add(method))
return false;
// Do we already have an entry?
Map<SootField, StaticFieldUse> entry = staticFieldUses.get(method);
if (entry != null) {
StaticFieldUse b = entry.get(variable);
if (b != null && b != StaticFieldUse.Unknown) {
if (readOnly)
return b == StaticFieldUse.Read || b == StaticFieldUse.ReadWrite;
else
return b != StaticFieldUse.Unused;
}
}
// Scan for references to this variable
for (Unit u : method.getActiveBody().getUnits()) {
if (u instanceof AssignStmt) {
AssignStmt assign = (AssignStmt) u;
if (assign.getLeftOp() instanceof StaticFieldRef) {
SootField sf = ((StaticFieldRef) assign.getLeftOp()).getField();
registerStaticVariableUse(method, sf, StaticFieldUse.Write);
				if (!readOnly && variable.equals(sf))
return true;
}
if (assign.getRightOp() instanceof StaticFieldRef) {
SootField sf = ((StaticFieldRef) assign.getRightOp()).getField();
registerStaticVariableUse(method, sf, StaticFieldUse.Read);
				if (variable.equals(sf))
return true;
}
}
if (((Stmt) u).containsInvokeExpr())
for (Iterator<Edge> edgeIt = Scene.v().getCallGraph().edgesOutOf(u); edgeIt.hasNext(); ) {
Edge e = edgeIt.next();
if (isStaticFieldUsed(e.getTgt().method(), variable, runList, readOnly))
return true;
}
}
// Variable is not read
registerStaticVariableUse(method, variable, StaticFieldUse.Unused);
return false;
}
private void registerStaticVariableUse(SootMethod method,
SootField variable, StaticFieldUse fieldUse) {
Map<SootField, StaticFieldUse> entry = staticFieldUses.get(method);
StaticFieldUse oldUse;
synchronized (staticFieldUses) {
if (entry == null) {
entry = new ConcurrentHashMap<SootField, StaticFieldUse>();
staticFieldUses.put(method, entry);
entry.put(variable, fieldUse);
return;
}
oldUse = entry.get(variable);
if (oldUse == null) {
entry.put(variable, fieldUse);
return;
}
}
// This part is monotonic, so no need for synchronization
StaticFieldUse newUse;
switch (oldUse) {
case Unknown :
case Unused :
case ReadWrite :
newUse = fieldUse;
break;
case Read :
newUse = (fieldUse == StaticFieldUse.Read) ? oldUse : StaticFieldUse.ReadWrite;
break;
case Write :
newUse = (fieldUse == StaticFieldUse.Write) ? oldUse : StaticFieldUse.ReadWrite;
break;
default:
throw new RuntimeException("Invalid field use");
}
entry.put(variable, newUse);
}
}
| fixed a wrong class reference
| src/soot/jimple/infoflow/solver/InfoflowCFG.java | fixed a wrong class reference | <ide><path>rc/soot/jimple/infoflow/solver/InfoflowCFG.java
<ide> ******************************************************************************/
<ide> package soot.jimple.infoflow.solver;
<ide>
<del>import gnu.trove.set.hash.THashSet;
<ide> import heros.solver.IDESolver;
<ide>
<ide> import java.util.Collection;
<add>import java.util.HashSet;
<ide> import java.util.Iterator;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide>
<ide> @Override
<ide> public boolean isStaticFieldRead(SootMethod method, SootField variable) {
<del> return isStaticFieldUsed(method, variable, new THashSet<SootMethod>(), true);
<add> return isStaticFieldUsed(method, variable, new HashSet<SootMethod>(), true);
<ide> }
<ide>
<ide> @Override
<ide> public boolean isStaticFieldUsed(SootMethod method, SootField variable) {
<del> return isStaticFieldUsed(method, variable, new THashSet<SootMethod>(), false);
<add> return isStaticFieldUsed(method, variable, new HashSet<SootMethod>(), false);
<ide> }
<ide>
<ide> private boolean isStaticFieldUsed(SootMethod method, SootField variable, |
|
Java | apache-2.0 | d9d3700bb610fb8d2eb58f5dfddd63876ee5742a | 0 | realityforge/arez,realityforge/arez,realityforge/arez | package org.realityforge.arez.integration;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nonnull;
import org.realityforge.arez.Arez;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.ArezTestUtil;
import org.realityforge.arez.Zone;
import org.realityforge.arez.annotations.ArezComponent;
import org.realityforge.arez.annotations.Computed;
import org.realityforge.arez.annotations.Observable;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class MultiZoneIntegrationTest
extends AbstractIntegrationTest
{
@Test
public void multiZoneScenario()
throws Throwable
{
ArezTestUtil.enableZones();
final Zone zone1 = Arez.createZone();
final Zone zone2 = Arez.createZone();
final ArezContext context1 = zone1.getContext();
final ArezContext context2 = zone2.getContext();
final SpyEventRecorder recorder = new SpyEventRecorder();
context1.getSpy().addSpyEventHandler( recorder );
context2.getSpy().addSpyEventHandler( recorder );
final AtomicReference<PersonModel> person = new AtomicReference<>();
final AtomicReference<PersonModel> person2 = new AtomicReference<>();
zone1.run( () -> {
person.set( PersonModel.create( "Bill", "Smith" ) );
zone2.run( () -> person2.set( PersonModel.create( "Bill", "Smith" ) ) );
} );
context1.autorun( "FirstNamePrinter1",
() -> record( recorder, "firstName1", person.get().getFirstName() ) );
context2.autorun( "FirstNamePrinter2",
() -> record( recorder, "firstName2", person2.get().getFirstName() ) );
context1.autorun( "FullNamePrinter1",
() -> record( recorder, "fullname1", person.get().getFullName() ) );
context2.autorun( "FullNamePrinter2",
() -> record( recorder, "fullname2", person2.get().getFullName() ) );
context1.action( "First Name Update1", true, () -> person.get().setFirstName( "Fred" ) );
context1.action( "Last Name Update1", true, () -> person.get().setLastName( "Donaldo" ) );
context2.action( "Last Name Update2", true, () -> person2.get().setLastName( "Donaldo" ) );
assertEqualsFixture( recorder.eventsAsString() );
}
@Test
public void multiZoneScenario_transactionAlignment()
throws Throwable
{
ArezTestUtil.enableZones();
final Zone zone1 = Arez.createZone();
final Zone zone2 = Arez.createZone();
final ArezContext context1 = zone1.getContext();
final ArezContext context2 = zone2.getContext();
final AtomicReference<PersonModel> person1 = new AtomicReference<>();
final AtomicReference<PersonModel> person2 = new AtomicReference<>();
zone1.run( () -> {
person1.set( PersonModel.create( "Bill", "Smith" ) );
zone2.run( () -> person2.set( PersonModel.create( "Bill", "Smith" ) ) );
} );
context1.action( () -> assertInTransaction( person1.get() ) );
context1.action( () -> assertNotInTransaction( person2.get() ) );
context2.action( () -> assertNotInTransaction( person1.get() ) );
context2.action( () -> assertInTransaction( person2.get() ) );
}
/**
* Test we are in a transaction by trying to observe an observable.
*/
@SuppressWarnings( "ResultOfMethodCallIgnored" )
private void assertInTransaction( @Nonnull final PersonModel person )
{
person.getFirstName();
}
/**
* Test we are not in a transaction by trying to observe an observable.
*/
private void assertNotInTransaction( @Nonnull final PersonModel person )
{
assertThrows( person::getFirstName );
}
@SuppressWarnings( "WeakerAccess" )
@ArezComponent
public static class PersonModel
{
@Nonnull
private String _firstName;
@Nonnull
private String _lastName;
@Nonnull
public static PersonModel create( @Nonnull final String firstName, @Nonnull final String lastName )
{
return new MultiZoneIntegrationTest_Arez_PersonModel( firstName, lastName );
}
PersonModel( @Nonnull final String firstName, @Nonnull final String lastName )
{
_firstName = firstName;
_lastName = lastName;
}
@Observable
@Nonnull
public String getFirstName()
{
return _firstName;
}
public void setFirstName( @Nonnull final String firstName )
{
_firstName = firstName;
}
@Observable
@Nonnull
public String getLastName()
{
return _lastName;
}
public void setLastName( @Nonnull final String lastName )
{
_lastName = lastName;
}
@Computed
@Nonnull
public String getFullName()
{
return getFirstName() + " " + getLastName();
}
}
}
| integration-tests/src/test/java/org/realityforge/arez/integration/MultiZoneIntegrationTest.java | package org.realityforge.arez.integration;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nonnull;
import org.realityforge.arez.Arez;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.ArezTestUtil;
import org.realityforge.arez.Zone;
import org.realityforge.arez.annotations.ArezComponent;
import org.realityforge.arez.annotations.Computed;
import org.realityforge.arez.annotations.Observable;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class MultiZoneIntegrationTest
extends AbstractIntegrationTest
{
@Test
public void multiZoneScenario()
throws Throwable
{
ArezTestUtil.enableZones();
final Zone zone1 = Arez.createZone();
final Zone zone2 = Arez.createZone();
final ArezContext context1 = zone1.getContext();
final ArezContext context2 = zone2.getContext();
final SpyEventRecorder recorder = new SpyEventRecorder();
context1.getSpy().addSpyEventHandler( recorder );
context2.getSpy().addSpyEventHandler( recorder );
final AtomicReference<PersonModel> person = new AtomicReference<>();
final AtomicReference<PersonModel> person2 = new AtomicReference<>();
zone1.run( () ->
{
person.set( PersonModel.create( "Bill", "Smith" ) );
zone2.run( () -> person2.set( PersonModel.create( "Bill", "Smith" ) ) );
} );
context1.autorun( "FirstNamePrinter1",
() -> record( recorder, "firstName1", person.get().getFirstName() ) );
context2.autorun( "FirstNamePrinter2",
() -> record( recorder, "firstName2", person2.get().getFirstName() ) );
context1.autorun( "FullNamePrinter1",
() -> record( recorder, "fullname1", person.get().getFullName() ) );
context2.autorun( "FullNamePrinter2",
() -> record( recorder, "fullname2", person2.get().getFullName() ) );
context1.action( "First Name Update1", true, () -> person.get().setFirstName( "Fred" ) );
context1.action( "Last Name Update1", true, () -> person.get().setLastName( "Donaldo" ) );
context2.action( "Last Name Update2", true, () -> person2.get().setLastName( "Donaldo" ) );
assertEqualsFixture( recorder.eventsAsString() );
}
@Test
public void multiZoneScenario_transactionAlignment()
throws Throwable
{
ArezTestUtil.enableZones();
final Zone zone1 = Arez.createZone();
final Zone zone2 = Arez.createZone();
final ArezContext context1 = zone1.getContext();
final ArezContext context2 = zone2.getContext();
final AtomicReference<PersonModel> person1 = new AtomicReference<>();
final AtomicReference<PersonModel> person2 = new AtomicReference<>();
zone1.run( () -> {
person1.set( PersonModel.create( "Bill", "Smith" ) );
zone2.run( () -> person2.set( PersonModel.create( "Bill", "Smith" ) ) );
} );
context1.action( () -> assertInTransaction( person1.get() ) );
context1.action( () -> assertNotInTransaction( person2.get() ) );
context2.action( () -> assertNotInTransaction( person1.get() ) );
context2.action( () -> assertInTransaction( person2.get() ) );
}
/**
* Test we are in a transaction by trying to observe an observable.
*/
@SuppressWarnings( "ResultOfMethodCallIgnored" )
private void assertInTransaction( @Nonnull final PersonModel person )
{
person.getFirstName();
}
/**
* Test we are not in a transaction by trying to observe an observable.
*/
private void assertNotInTransaction( @Nonnull final PersonModel person )
{
assertThrows( person::getFirstName );
}
@SuppressWarnings( "WeakerAccess" )
@ArezComponent
public static class PersonModel
{
@Nonnull
private String _firstName;
@Nonnull
private String _lastName;
@Nonnull
public static PersonModel create( @Nonnull final String firstName, @Nonnull final String lastName )
{
return new MultiZoneIntegrationTest_Arez_PersonModel( firstName, lastName );
}
PersonModel( @Nonnull final String firstName, @Nonnull final String lastName )
{
_firstName = firstName;
_lastName = lastName;
}
@Observable
@Nonnull
public String getFirstName()
{
return _firstName;
}
public void setFirstName( @Nonnull final String firstName )
{
_firstName = firstName;
}
@Observable
@Nonnull
public String getLastName()
{
return _lastName;
}
public void setLastName( @Nonnull final String lastName )
{
_lastName = lastName;
}
@Computed
@Nonnull
public String getFullName()
{
return getFirstName() + " " + getLastName();
}
}
}
| Whitespace
| integration-tests/src/test/java/org/realityforge/arez/integration/MultiZoneIntegrationTest.java | Whitespace | <ide><path>ntegration-tests/src/test/java/org/realityforge/arez/integration/MultiZoneIntegrationTest.java
<ide>
<ide> final AtomicReference<PersonModel> person = new AtomicReference<>();
<ide> final AtomicReference<PersonModel> person2 = new AtomicReference<>();
<del> zone1.run( () ->
<del> {
<del> person.set( PersonModel.create( "Bill", "Smith" ) );
<del> zone2.run( () -> person2.set( PersonModel.create( "Bill", "Smith" ) ) );
<del> } );
<add> zone1.run( () -> {
<add> person.set( PersonModel.create( "Bill", "Smith" ) );
<add> zone2.run( () -> person2.set( PersonModel.create( "Bill", "Smith" ) ) );
<add> } );
<ide>
<ide> context1.autorun( "FirstNamePrinter1",
<ide> () -> record( recorder, "firstName1", person.get().getFirstName() ) ); |
|
Java | agpl-3.0 | 9dd09798c5a13a4b893431a6fb1e49cc415cd7ff | 0 | ubtue/ub_tools,ubtue/ub_tools,ubtue/ub_tools,ubtue/ub_tools,ubtue/ub_tools,ubtue/ub_tools,ubtue/ub_tools,ubtue/ub_tools | package de.unituebingen.ub.ubtools.solrmarcMixin;
import org.marc4j.marc.DataField;
import org.marc4j.marc.Record;
import org.marc4j.marc.Subfield;
import org.marc4j.marc.VariableField;
import org.solrmarc.index.SolrIndexerMixin;
import java.util.*;
public class IxTheoPublisher extends SolrIndexerMixin {
private final static Map<String, String> replacements = new LinkedHashMap<>(128);
static {
// delete commas at the end
replacements.put("\\s*,$", "");
// delete comments
replacements.put("\\[(.*)\\]", "");
// Substitute multiple spaces to single spaces
replacements.put("\\s+", " ");
// insert space after a period if doesn't exists.
replacements.put("\\.(?!\\s)", ". ");
replacements.put("\\s-", "-");
// Replace some abbreviation:
replacements.put(" und ", " u. ");
replacements.put(" der ", " d. ");
// replacements.put("&", "und");
replacements.put("Univ\\.-Verl", "Universitätsverlag");
replacements.put("Verl\\.-Haus", "Verlagshaus");
replacements.put("Verlag-Anst$", "Verlagsanstalt");
replacements.put("Verl\\.-Anst$", "Verlagsanstalt");
replacements.put("Verl-Anst\\.", "Verlagsanstalt");
replacements.put("Verl\\.-Anst\\.", "Verlagsanstalt");
replacements.put("Verlag-Anstalt$", "Verlagsanstalt");
replacements.put("Verlag Anst$", "Verlagsanstalt");
replacements.put("Verlag Anst\\.", "Verlagsanstalt");
replacements.put("Verlag Anstalt", "Verlagsanstalt");
replacements.put("Verlagsanst$", "Verlagsanstalt");
replacements.put("Verlagsanst\\.", "Verlagsanstalt");
replacements.put("^Verlag d\\. ", "");
replacements.put("^Verlag der ", "");
replacements.put("^Verlag des ", "");
replacements.put("Verl\\.", "Verlag");
replacements.put("Verl$", "Verlag");
replacements.put("Akad$", "Akademie");
replacements.put("Akad\\.", "Akademie");
replacements.put("Akadem\\.", "Akademie");
replacements.put("Akade\\.", "Akademie");
replacements.put("Acad$", "Academy");
replacements.put("Acad\\.", "Academy");
replacements.put("wiss\\.", "wissenschaft");
replacements.put("Wiss$", "Wissenschaft");
replacements.put("Wiss\\.", "Wissenschaft");
replacements.put("Lit$", "Literatur");
replacements.put("Lit\\.", "Literatur");
replacements.put("Anst$", "Anstalt");
replacements.put("Anst\\.$", "Anstalt");
replacements.put("anst$", "anstalt");
replacements.put("anst\\.$", "anstalt");
replacements.put("Kathol\\.", "Katholische");
replacements.put("Evang\\.", "Evangelische");
replacements.put("Ev\\.", "Evangelische");
replacements.put("Pr$", "Press");
replacements.put("Pr\\.", "Press");
replacements.put("^Priv\\.", "Privilegierte");
replacements.put("^Privileg\\.", "Privilegierte");
replacements.put("Württ\\.", "Württembergische");
replacements.put("Württemb\\.", "Württembergische");
replacements.put("Bayer\\.", "Bayerische");
replacements.put("ges\\.", "gesellschaft");
replacements.put("ges$", "gesellschaft");
replacements.put("Ges\\.", "Gesellschaft");
replacements.put("Ges$", "Gesellschaft");
replacements.put("Inst\\.", "Institution");
replacements.put("Internat$", "International");
replacements.put("T&T", "International");
replacements.put("Univ\\. of", "University of");
replacements.put("Univ\\.-Bibliothek", "Universitätsbibliothek");
replacements.put("^1st ", "1st. ");
replacements.put(" Fd ", " Field ");
replacements.put(" Fd\\. ", " Field ");
replacements.put(" Svy ", " Survey ");
replacements.put(" Regt", " Regiment");
replacements.put(" Regt\\.", " Regiment");
replacements.put("RE$", " R. E.");
replacements.put("Calif.", "California");
}
private Set<String> publishers = null;
@Override
public void perRecordInit() {
super.perRecordInit();
publishers = null;
}
/**
* Get all available publishers from the record.
*
* @param record the record
* @return publishers
*/
public Set<String> getPublishers(final Record record) {
publishers = new LinkedHashSet<>();
final Set<String> rawPublishers = getRawPublishers(record);
for (String publisher : rawPublishers) {
publisher = publisher.trim();
for (final Map.Entry<String, String> replacement : replacements.entrySet()) {
publisher = publisher.replaceAll(replacement.getKey(), replacement.getValue()).trim();
}
if (!publisher.isEmpty()) {
publishers.add(publisher);
}
}
return publishers;
}
public Set<String> getPublishersOrUnassigned(final Record record) {
final Set<String> publishers = getPublishers(record);
if (publishers == null || publishers.isEmpty()) {
return Collections.singleton("[Unassigned]");
}
return publishers;
}
public Set<String> getRawPublishers(final Record record) {
final Set<String> publishers = new LinkedHashSet<>();
// First check old-style 260b name:
final List<VariableField> list260 = record.getVariableFields("260");
for (final VariableField vf : list260) {
final DataField df = (DataField) vf;
final Subfield current = df.getSubfield('b');
if (current != null) {
publishers.add(current.getData());
}
}
// Now track down relevant RDA-style 264b names; we only care about
// copyright and publication names (and ignore copyright names if
// publication names are present).
final Set<String> pubNames = new LinkedHashSet<>();
final Set<String> copyNames = new LinkedHashSet<>();
final List<VariableField> list264 = record.getVariableFields("264");
for (final VariableField vf : list264) {
final DataField df = (DataField) vf;
final Subfield currentName = df.getSubfield('b');
if (currentName != null) {
final char ind2 = df.getIndicator2();
switch (ind2) {
case '1':
pubNames.add(currentName.getData());
break;
case '4':
copyNames.add(currentName.getData());
break;
}
}
}
if (!pubNames.isEmpty()) {
publishers.addAll(pubNames);
} else if (!copyNames.isEmpty()) {
publishers.addAll(copyNames);
}
return publishers;
}
}
| solrmarc_mixin/src/de/unituebingen/ub/ubtools/solrmarcMixin/IxTheoPublisher.java | package de.unituebingen.ub.ubtools.solrmarcMixin;
import org.marc4j.marc.DataField;
import org.marc4j.marc.Record;
import org.marc4j.marc.Subfield;
import org.marc4j.marc.VariableField;
import org.solrmarc.index.SolrIndexerMixin;
import java.util.*;
public class IxTheoPublisher extends SolrIndexerMixin {
private final static Map<String, String> replacements = new LinkedHashMap<>(128);
static {
// delete commas at the end
replacements.put("\\s*,$", "");
// delete comments
replacements.put("\\[(.*)\\]", "");
// Substitute multiple spaces to single spaces
replacements.put("\\s+", " ");
// insert space after a period if doesn't exists.
replacements.put("\\.(?!\\s)", ". ");
replacements.put("\\s-", "-");
// Replace some abbreviation:
replacements.put(" und ", " u. ");
replacements.put(" der ", " d. ");
// replacements.put("&", "und");
replacements.put("Univ\\.-Verl", "Universitätsverlag");
replacements.put("Verl\\.-Haus", "Verlagshaus");
replacements.put("Verlag-Anst$", "Verlagsanstalt");
replacements.put("Verl\\.-Anst$", "Verlagsanstalt");
replacements.put("Verl-Anst\\.", "Verlagsanstalt");
replacements.put("Verl\\.-Anst\\.", "Verlagsanstalt");
replacements.put("Verlag-Anstalt$", "Verlagsanstalt");
replacements.put("Verlag Anst$", "Verlagsanstalt");
replacements.put("Verlag Anst\\.", "Verlagsanstalt");
replacements.put("Verlag Anstalt", "Verlagsanstalt");
replacements.put("Verlagsanst$", "Verlagsanstalt");
replacements.put("Verlagsanst\\.", "Verlagsanstalt");
replacements.put("^Verlag d\\. ", "");
replacements.put("^Verlag der ", "");
replacements.put("^Verlag des ", "");
replacements.put("Verl\\.", "Verlag");
replacements.put("Verl$", "Verlag");
replacements.put("Akad$", "Akademie");
replacements.put("Akad\\.", "Akademie");
replacements.put("Akadem\\.", "Akademie");
replacements.put("Akade\\.", "Akademie");
replacements.put("Acad$", "Academy");
replacements.put("Acad\\.", "Academy");
replacements.put("wiss\\.", "wissenschaft");
replacements.put("Wiss$", "Wissenschaft");
replacements.put("Wiss\\.", "Wissenschaft");
replacements.put("Lit$", "Literatur");
replacements.put("Lit\\.", "Literatur");
replacements.put("Anst$", "Anstalt");
replacements.put("Anst\\.$", "Anstalt");
replacements.put("anst$", "anstalt");
replacements.put("anst\\.$", "anstalt");
replacements.put("Kathol\\.", "Katholische");
replacements.put("Evang\\.", "Evangelische");
replacements.put("Ev\\.", "Evangelische");
replacements.put("Pr$", "Press");
replacements.put("Pr\\.", "Press");
replacements.put("^Priv\\.", "Privilegierte");
replacements.put("^Privileg\\.", "Privilegierte");
replacements.put("Württ\\.", "Württembergische");
replacements.put("Württemb\\.", "Württembergische");
replacements.put("Bayer\\.", "Bayerische");
replacements.put("ges\\.", "gesellschaft");
replacements.put("ges$", "gesellschaft");
replacements.put("Ges\\.", "Gesellschaft");
replacements.put("Ges$", "Gesellschaft");
replacements.put("Inst\\.", "Institution");
replacements.put("Internat$", "International");
replacements.put("T&T", "International");
replacements.put("Univ\\. of", "University of");
replacements.put("Univ\\.-Bibliothek", "Universitätsbibliothek");
replacements.put("^1st ", "1st. ");
replacements.put(" Fd ", " Field ");
replacements.put(" Fd\\. ", " Field ");
replacements.put(" Svy ", " Survey ");
replacements.put(" Regt", " Regiment");
replacements.put(" Regt\\.", " Regiment");
replacements.put("RE$", " R. E.");
replacements.put("Calif.", "California");
}
private Set<String> publishers = null;
@Override
public void perRecordInit() {
super.perRecordInit();
publishers = null;
}
/**
* Get all available publishers from the record.
*
* @param record the record
* @return publishers
*/
public Set<String> getPublishers(final Record record) {
if (publishers == null) {
publishers = new LinkedHashSet<>();
final Set<String> rawPublishers = getRawPublishers(record);
for (String publisher : rawPublishers) {
publisher = publisher.trim();
for (final Map.Entry<String, String> replacement : replacements.entrySet()) {
publisher = publisher.replaceAll(replacement.getKey(), replacement.getValue()).trim();
}
if (!publisher.isEmpty()) {
publishers.add(publisher);
}
}
}
return publishers;
}
public Set<String> getPublishersOrUnassigned(final Record record) {
final Set<String> publishers = getPublishers(record);
if (publishers == null || publishers.isEmpty()) {
return Collections.singleton("[Unassigned]");
}
return publishers;
}
public Set<String> getRawPublishers(final Record record) {
final Set<String> publishers = new LinkedHashSet<>();
// First check old-style 260b name:
final List<VariableField> list260 = record.getVariableFields("260");
for (final VariableField vf : list260) {
final DataField df = (DataField) vf;
final Subfield current = df.getSubfield('b');
if (current != null) {
publishers.add(current.getData());
}
}
// Now track down relevant RDA-style 264b names; we only care about
// copyright and publication names (and ignore copyright names if
// publication names are present).
final Set<String> pubNames = new LinkedHashSet<>();
final Set<String> copyNames = new LinkedHashSet<>();
final List<VariableField> list264 = record.getVariableFields("264");
for (final VariableField vf : list264) {
final DataField df = (DataField) vf;
final Subfield currentName = df.getSubfield('b');
if (currentName != null) {
final char ind2 = df.getIndicator2();
switch (ind2) {
case '1':
pubNames.add(currentName.getData());
break;
case '4':
copyNames.add(currentName.getData());
break;
}
}
}
if (!pubNames.isEmpty()) {
publishers.addAll(pubNames);
} else if (!copyNames.isEmpty()) {
publishers.addAll(copyNames);
}
return publishers;
}
}
| Fixing problem of oonly one publisher for all items
| solrmarc_mixin/src/de/unituebingen/ub/ubtools/solrmarcMixin/IxTheoPublisher.java | Fixing problem of oonly one publisher for all items | <ide><path>olrmarc_mixin/src/de/unituebingen/ub/ubtools/solrmarcMixin/IxTheoPublisher.java
<ide> * @return publishers
<ide> */
<ide> public Set<String> getPublishers(final Record record) {
<del> if (publishers == null) {
<del> publishers = new LinkedHashSet<>();
<del> final Set<String> rawPublishers = getRawPublishers(record);
<add> publishers = new LinkedHashSet<>();
<add> final Set<String> rawPublishers = getRawPublishers(record);
<ide>
<del> for (String publisher : rawPublishers) {
<del> publisher = publisher.trim();
<del> for (final Map.Entry<String, String> replacement : replacements.entrySet()) {
<del> publisher = publisher.replaceAll(replacement.getKey(), replacement.getValue()).trim();
<del> }
<add> for (String publisher : rawPublishers) {
<add> publisher = publisher.trim();
<add> for (final Map.Entry<String, String> replacement : replacements.entrySet()) {
<add> publisher = publisher.replaceAll(replacement.getKey(), replacement.getValue()).trim();
<add> }
<ide>
<del> if (!publisher.isEmpty()) {
<del> publishers.add(publisher);
<del> }
<add> if (!publisher.isEmpty()) {
<add> publishers.add(publisher);
<ide> }
<ide> }
<ide> return publishers; |
|
JavaScript | mit | 2ed6cfa25db2e4edee7d9f1bf5e3a577dd88f1be | 0 | argosity/eslint-config-argosity | module.exports = {
extends: [
"plugin:react/recommended",
"plugin:@typescript-eslint/recommended",
'plugin:@typescript-eslint/eslint-recommended',
],
rules: {
indent: [
2, 4,
{ SwitchCase: 1 },
],
camelcase: 0,
yoda: 'off',
'no-plusplus': ['error', { 'allowForLoopAfterthoughts': true }],
'padded-blocks': ['error', { 'classes': 'always', 'blocks': 'never', 'switches': 'never' }],
'key-spacing': [2, {
singleLine: {
beforeColon: false,
afterColon: true
},
multiLine: {
beforeColon: false,
afterColon: true,
mode: 'minimum'
}
}],
'max-len': [ 'error', 100, 2, {
ignoreUrls: true,
ignoreComments: true,
ignoreRegExpLiterals: true,
ignoreStrings: true,
ignoreTemplateLiterals: true,
}],
'import/prefer-default-export': 0,
'default-case': 0,
'function-paren-newline': ['error', 'consistent'],
'class-methods-use-this': 0,
'no-underscore-dangle': 0,
'import/no-unresolved': 0,
'import/extensions': 0,
'react/jsx-uses-vars': [2],
'react/jsx-indent': [2, 4],
'react/jsx-indent-props': [0, 4],
'react/forbid-prop-types': 0,
'no-unused-vars': [2, {'varsIgnorePattern': '_+', 'ignoreRestSiblings': true}],
'react/prefer-stateless-function': [2, { ignorePureComponents: true }],
'import/no-extraneous-dependencies': [0, { devDependencies: true }],
'no-param-reassign': ['error', { 'props': false }],
'object-curly-newline': ['error', { 'consistent': true }],
'no-multi-spaces': [2, {
exceptions: {
Identifier: true,
ClassProperty: true,
ImportDeclaration: true,
VariableDeclarator: true,
AssignmentExpression: true,
JSXAttribute: true,
JSXIdentifier: true,
JSXOpeningElement: true,
JSXClosingElement: true,
},
}],
"default-case": 0,
"func-names:": 0,
"space-before-function-paren": 0,
"react/jsx-handler-names": 0,
"react/jsx-fragments": 0,
"react/no-unused-prop-types": 0,
"import/export": 0,
'react/prop-types': 0,
'import/prefer-default-export': 0,
'@typescript-eslint/no-explicit-any': 0,
"@typescript-eslint/no-use-before-define": 0,
'@typescript-eslint/no-unused-vars': ["error", { 'varsIgnorePattern': '_+', 'ignoreRestSiblings': true }],
"@typescript-eslint/no-non-null-assertion": 0,
"@typescript-eslint/explicit-function-return-type": 0,
"comma-dangle": ["error", {
"arrays": "always-multiline",
"objects": "always-multiline",
"imports": "always-multiline",
"exports": "always-multiline",
"functions": "always-multiline"
}],
"lines-between-class-members": ["error", "always", { "exceptAfterSingleLine": true }],
"semi": ["error", "never"],
},
globals: {
fetch: false
},
plugins: [
'react',
'jsx-a11y',
'import',
],
parser: 'babel-eslint',
parserOptions: {
ecmaFeatures: {
'experimentalObjectRestSpread': true,
'jsx': true
},
sourceType: 'module'
},
env: {
browser: true,
es6: true
},
};
| index.js | module.exports = {
extends: [
"plugin:react/recommended",
"plugin:@typescript-eslint/recommended",
'plugin:@typescript-eslint/eslint-recommended',
],
rules: {
indent: [
2, 4,
{ SwitchCase: 1 },
],
camelcase: 0,
yoda: 'off',
'no-plusplus': ['error', { 'allowForLoopAfterthoughts': true }],
'padded-blocks': ['error', { 'classes': 'always', 'blocks': 'never', 'switches': 'never' }],
'key-spacing': [2, {
singleLine: {
beforeColon: false,
afterColon: true
},
multiLine: {
beforeColon: false,
afterColon: true,
mode: 'minimum'
}
}],
'max-len': [ 'error', 100, 2, {
ignoreUrls: true,
ignoreComments: true,
ignoreRegExpLiterals: true,
ignoreStrings: true,
ignoreTemplateLiterals: true,
}],
'import/prefer-default-export': 0,
'default-case': 0,
'function-paren-newline': ['error', 'consistent'],
'class-methods-use-this': 0,
'no-underscore-dangle': 0,
'import/no-unresolved': 0,
'import/extensions': 0,
'react/jsx-uses-vars': [2],
'react/jsx-indent': [2, 4],
'react/jsx-indent-props': [0, 4],
'react/forbid-prop-types': 0,
'no-unused-vars': [2, {'varsIgnorePattern': '_+'}],
'react/prefer-stateless-function': [2, { ignorePureComponents: true }],
'import/no-extraneous-dependencies': [0, { devDependencies: true }],
'no-param-reassign': ['error', { 'props': false }],
'object-curly-newline': ['error', { 'consistent': true }],
'no-multi-spaces': [2, {
exceptions: {
Identifier: true,
ClassProperty: true,
ImportDeclaration: true,
VariableDeclarator: true,
AssignmentExpression: true,
JSXAttribute: true,
JSXIdentifier: true,
JSXOpeningElement: true,
JSXClosingElement: true,
},
}],
"default-case": 0,
"func-names:": 0,
"space-before-function-paren": 0,
"react/jsx-handler-names": 0,
"react/jsx-fragments": 0,
"react/no-unused-prop-types": 0,
"import/export": 0,
'react/prop-types': 0,
'import/prefer-default-export': 0,
'@typescript-eslint/no-explicit-any': 0,
"@typescript-eslint/no-use-before-define": 0,
"@typescript-eslint/no-unused-vars": ["error"],
"@typescript-eslint/no-non-null-assertion": 0,
"@typescript-eslint/explicit-function-return-type": 0,
"comma-dangle": ["error", {
"arrays": "always-multiline",
"objects": "always-multiline",
"imports": "always-multiline",
"exports": "always-multiline",
"functions": "always-multiline"
}],
"lines-between-class-members": ["error", "always", { "exceptAfterSingleLine": true }],
"semi": ["error", "never"],
},
globals: {
fetch: false
},
plugins: [
'react',
'jsx-a11y',
'import',
],
parser: 'babel-eslint',
parserOptions: {
ecmaFeatures: {
'experimentalObjectRestSpread': true,
'jsx': true
},
sourceType: 'module'
},
env: {
browser: true,
es6: true
},
};
| allow rest spread siblings to be ignored
| index.js | allow rest spread siblings to be ignored | <ide><path>ndex.js
<ide> 'react/jsx-indent': [2, 4],
<ide> 'react/jsx-indent-props': [0, 4],
<ide> 'react/forbid-prop-types': 0,
<del> 'no-unused-vars': [2, {'varsIgnorePattern': '_+'}],
<add> 'no-unused-vars': [2, {'varsIgnorePattern': '_+', 'ignoreRestSiblings': true}],
<ide> 'react/prefer-stateless-function': [2, { ignorePureComponents: true }],
<ide> 'import/no-extraneous-dependencies': [0, { devDependencies: true }],
<ide> 'no-param-reassign': ['error', { 'props': false }],
<ide> 'import/prefer-default-export': 0,
<ide> '@typescript-eslint/no-explicit-any': 0,
<ide> "@typescript-eslint/no-use-before-define": 0,
<del> "@typescript-eslint/no-unused-vars": ["error"],
<add> '@typescript-eslint/no-unused-vars': ["error", { 'varsIgnorePattern': '_+', 'ignoreRestSiblings': true }],
<ide> "@typescript-eslint/no-non-null-assertion": 0,
<ide> "@typescript-eslint/explicit-function-return-type": 0,
<ide> "comma-dangle": ["error", { |
|
Java | apache-2.0 | 1ab2bddd6d62e033d7f559195c732c9800a86b0a | 0 | danbernier/WordCram,danbernier/WordCram,danbernier/WordCram | package wordcram;
/*
Copyright 2010 Daniel Bernier
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.awt.Color;
import java.awt.Shape;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import processing.core.PFont;
import processing.core.PGraphics;
import processing.core.PGraphicsJava2D;
import processing.core.PVector;
class WordCramEngine {
private WordRenderer renderer;
private WordFonter fonter;
private WordSizer sizer;
private WordColorer colorer;
private WordAngler angler;
private WordPlacer placer;
private WordNudger nudger;
private Word[] words; // just a safe copy
private EngineWord[] eWords;
private int eWordIndex = -1;
private RenderOptions renderOptions;
private Observer observer;
// TODO Damn, really need to break down that list of arguments.
WordCramEngine(WordRenderer renderer, Word[] words, WordFonter fonter, WordSizer sizer, WordColorer colorer, WordAngler angler, WordPlacer placer, WordNudger nudger, WordShaper shaper, BBTreeBuilder bbTreeBuilder, RenderOptions renderOptions, Observer observer) {
this.renderer = renderer;
this.fonter = fonter;
this.sizer = sizer;
this.colorer = colorer;
this.angler = angler;
this.placer = placer;
this.nudger = nudger;
this.observer = observer;
this.renderOptions = renderOptions;
this.words = words;
this.eWords = wordsIntoEngineWords(words, shaper, bbTreeBuilder);
}
private EngineWord[] wordsIntoEngineWords(Word[] words, WordShaper wordShaper, BBTreeBuilder bbTreeBuilder) {
ArrayList<EngineWord> engineWords = new ArrayList<EngineWord>();
int maxNumberOfWords = words.length;
if (renderOptions.maxNumberOfWordsToDraw >= 0) {
maxNumberOfWords = Math.min(maxNumberOfWords, renderOptions.maxNumberOfWordsToDraw);
}
for (int i = 0; i < maxNumberOfWords; i++) {
Word word = words[i];
EngineWord eWord = new EngineWord(word, i, words.length, bbTreeBuilder);
PFont wordFont = word.getFont(fonter);
float wordSize = word.getSize(sizer, i, words.length);
float wordAngle = word.getAngle(angler);
Shape shape = wordShaper.getShapeFor(eWord.word.word, wordFont, wordSize, wordAngle);
if (isTooSmall(shape, renderOptions.minShapeSize)) {
skipWord(word, WordSkipReason.SHAPE_WAS_TOO_SMALL);
}
else {
eWord.setShape(shape, renderOptions.wordPadding);
engineWords.add(eWord); // DON'T add eWords with no shape.
}
}
for (int i = maxNumberOfWords; i < words.length; i++) {
skipWord(words[i], WordSkipReason.WAS_OVER_MAX_NUMBER_OF_WORDS);
}
return engineWords.toArray(new EngineWord[0]);
}
private boolean isTooSmall(Shape shape, int minShapeSize) {
if (minShapeSize < 1) {
minShapeSize = 1;
}
Rectangle2D r = shape.getBounds2D();
// Most words will be wider than tall, so this basically boils down to height.
// For the odd word like "I", we check width, too.
return r.getHeight() < minShapeSize || r.getWidth() < minShapeSize;
}
private void skipWord(Word word, WordSkipReason reason) {
// TODO delete these properties when starting a sketch, in case it's a re-run w/ the same words.
// NOTE: keep these as properties, because they (will be) deleted when the WordCramEngine re-runs.
word.wasSkippedBecause(reason);
observer.wordSkipped(word);
}
boolean hasMore() {
return eWordIndex < eWords.length-1;
}
void drawAll() {
observer.beginDraw();
while(hasMore()) {
drawNext();
}
renderer.finish();
observer.endDraw();
}
void drawNext() {
if (!hasMore()) return;
EngineWord eWord = eWords[++eWordIndex];
boolean wasPlaced = placeWord(eWord);
if (wasPlaced) { // TODO unit test (somehow)
drawWordImage(eWord);
observer.wordDrawn(eWord.word);
}
}
private boolean placeWord(EngineWord eWord) {
Word word = eWord.word;
Rectangle2D rect = eWord.getShape().getBounds2D(); // TODO can we move these into EngineWord.setDesiredLocation? Does that make sense?
int wordImageWidth = (int)rect.getWidth();
int wordImageHeight = (int)rect.getHeight();
eWord.setDesiredLocation(placer, eWords.length, wordImageWidth, wordImageHeight, renderer.getWidth(), renderer.getHeight());
// Set maximum number of placement trials
int maxAttemptsToPlace = renderOptions.maxAttemptsToPlaceWord > 0 ?
renderOptions.maxAttemptsToPlaceWord :
calculateMaxAttemptsFromWordWeight(word);
EngineWord lastCollidedWith = null;
for (int attempt = 0; attempt < maxAttemptsToPlace; attempt++) {
eWord.nudge(nudger.nudgeFor(word, attempt));
PVector loc = eWord.getCurrentLocation();
if (loc.x < 0 || loc.y < 0 || loc.x + wordImageWidth >= renderer.getWidth() || loc.y + wordImageHeight >= renderer.getHeight()) {
continue;
}
if (lastCollidedWith != null && eWord.overlaps(lastCollidedWith)) {
continue;
}
boolean foundOverlap = false;
for (int i = 0; !foundOverlap && i < eWordIndex; i++) {
EngineWord otherWord = eWords[i];
if (otherWord.wasSkipped()) continue; //can't overlap with skipped word
if (eWord.overlaps(otherWord)) {
foundOverlap = true;
lastCollidedWith = otherWord;
}
}
if (!foundOverlap) {
eWord.finalizeLocation();
return true;
}
}
skipWord(eWord.word, WordSkipReason.NO_SPACE);
return false;
}
private int calculateMaxAttemptsFromWordWeight(Word word) {
return (int)((1.0 - word.weight) * 600) + 100;
}
private void drawWordImage(EngineWord word) {
renderer.drawWord(word, new Color(word.word.getColor(colorer), true));
}
Word getWordAt(float x, float y) {
for (int i = eWords.length-1; i >= 0; i--) {
if (eWords[i].wasPlaced()) {
if (eWords[i].containsPoint(x, y)) {
return eWords[i].word;
}
}
}
return null;
}
Word[] getSkippedWords() {
ArrayList<Word> skippedWords = new ArrayList<Word>();
for (int i = 0; i < words.length; i++) {
if (words[i].wasSkipped()) {
skippedWords.add(words[i]);
}
}
return skippedWords.toArray(new Word[0]);
}
float getProgress() {
return (float) (this.eWordIndex+1) / this.eWords.length;
}
}
| src/wordcram/WordCramEngine.java | package wordcram;
/*
Copyright 2010 Daniel Bernier
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.awt.Color;
import java.awt.Shape;
import java.awt.geom.Rectangle2D;
import java.util.ArrayList;
import processing.core.PFont;
import processing.core.PGraphics;
import processing.core.PGraphicsJava2D;
import processing.core.PVector;
class WordCramEngine {
private WordRenderer renderer;
private WordFonter fonter;
private WordSizer sizer;
private WordColorer colorer;
private WordAngler angler;
private WordPlacer placer;
private WordNudger nudger;
private Word[] words; // just a safe copy
private EngineWord[] eWords;
private int eWordIndex = -1;
private RenderOptions renderOptions;
private Observer observer;
WordCramEngine(WordRenderer renderer, Word[] words, WordFonter fonter, WordSizer sizer, WordColorer colorer, WordAngler angler, WordPlacer placer, WordNudger nudger, WordShaper shaper, BBTreeBuilder bbTreeBuilder, RenderOptions renderOptions, Observer observer) {
this.renderer = renderer;
this.fonter = fonter;
this.sizer = sizer;
this.colorer = colorer;
this.angler = angler;
this.placer = placer;
this.nudger = nudger;
this.observer = observer;
this.renderOptions = renderOptions;
this.words = words;
this.eWords = wordsIntoEngineWords(words, shaper, bbTreeBuilder);
}
private EngineWord[] wordsIntoEngineWords(Word[] words, WordShaper wordShaper, BBTreeBuilder bbTreeBuilder) {
ArrayList<EngineWord> engineWords = new ArrayList<EngineWord>();
int maxNumberOfWords = words.length;
if (renderOptions.maxNumberOfWordsToDraw >= 0) {
maxNumberOfWords = Math.min(maxNumberOfWords, renderOptions.maxNumberOfWordsToDraw);
}
for (int i = 0; i < maxNumberOfWords; i++) {
Word word = words[i];
EngineWord eWord = new EngineWord(word, i, words.length, bbTreeBuilder);
PFont wordFont = word.getFont(fonter);
float wordSize = word.getSize(sizer, i, words.length);
float wordAngle = word.getAngle(angler);
Shape shape = wordShaper.getShapeFor(eWord.word.word, wordFont, wordSize, wordAngle);
if (isTooSmall(shape, renderOptions.minShapeSize)) {
skipWord(word, WordSkipReason.SHAPE_WAS_TOO_SMALL);
}
else {
eWord.setShape(shape, renderOptions.wordPadding);
engineWords.add(eWord); // DON'T add eWords with no shape.
}
}
for (int i = maxNumberOfWords; i < words.length; i++) {
skipWord(words[i], WordSkipReason.WAS_OVER_MAX_NUMBER_OF_WORDS);
}
return engineWords.toArray(new EngineWord[0]);
}
private boolean isTooSmall(Shape shape, int minShapeSize) {
if (minShapeSize < 1) {
minShapeSize = 1;
}
Rectangle2D r = shape.getBounds2D();
// Most words will be wider than tall, so this basically boils down to height.
// For the odd word like "I", we check width, too.
return r.getHeight() < minShapeSize || r.getWidth() < minShapeSize;
}
private void skipWord(Word word, WordSkipReason reason) {
// TODO delete these properties when starting a sketch, in case it's a re-run w/ the same words.
// NOTE: keep these as properties, because they (will be) deleted when the WordCramEngine re-runs.
word.wasSkippedBecause(reason);
observer.wordSkipped(word);
}
boolean hasMore() {
return eWordIndex < eWords.length-1;
}
void drawAll() {
observer.beginDraw();
while(hasMore()) {
drawNext();
}
renderer.finish();
observer.endDraw();
}
void drawNext() {
if (!hasMore()) return;
EngineWord eWord = eWords[++eWordIndex];
boolean wasPlaced = placeWord(eWord);
if (wasPlaced) { // TODO unit test (somehow)
drawWordImage(eWord);
observer.wordDrawn(eWord.word);
}
}
private boolean placeWord(EngineWord eWord) {
Word word = eWord.word;
Rectangle2D rect = eWord.getShape().getBounds2D(); // TODO can we move these into EngineWord.setDesiredLocation? Does that make sense?
int wordImageWidth = (int)rect.getWidth();
int wordImageHeight = (int)rect.getHeight();
eWord.setDesiredLocation(placer, eWords.length, wordImageWidth, wordImageHeight, renderer.getWidth(), renderer.getHeight());
// Set maximum number of placement trials
int maxAttemptsToPlace = renderOptions.maxAttemptsToPlaceWord > 0 ?
renderOptions.maxAttemptsToPlaceWord :
calculateMaxAttemptsFromWordWeight(word);
EngineWord lastCollidedWith = null;
for (int attempt = 0; attempt < maxAttemptsToPlace; attempt++) {
eWord.nudge(nudger.nudgeFor(word, attempt));
PVector loc = eWord.getCurrentLocation();
if (loc.x < 0 || loc.y < 0 || loc.x + wordImageWidth >= renderer.getWidth() || loc.y + wordImageHeight >= renderer.getHeight()) {
continue;
}
if (lastCollidedWith != null && eWord.overlaps(lastCollidedWith)) {
continue;
}
boolean foundOverlap = false;
for (int i = 0; !foundOverlap && i < eWordIndex; i++) {
EngineWord otherWord = eWords[i];
if (otherWord.wasSkipped()) continue; //can't overlap with skipped word
if (eWord.overlaps(otherWord)) {
foundOverlap = true;
lastCollidedWith = otherWord;
}
}
if (!foundOverlap) {
eWord.finalizeLocation();
return true;
}
}
skipWord(eWord.word, WordSkipReason.NO_SPACE);
return false;
}
private int calculateMaxAttemptsFromWordWeight(Word word) {
return (int)((1.0 - word.weight) * 600) + 100;
}
private void drawWordImage(EngineWord word) {
renderer.drawWord(word, new Color(word.word.getColor(colorer), true));
}
Word getWordAt(float x, float y) {
for (int i = eWords.length-1; i >= 0; i--) {
if (eWords[i].wasPlaced()) {
if (eWords[i].containsPoint(x, y)) {
return eWords[i].word;
}
}
}
return null;
}
Word[] getSkippedWords() {
ArrayList<Word> skippedWords = new ArrayList<Word>();
for (int i = 0; i < words.length; i++) {
if (words[i].wasSkipped()) {
skippedWords.add(words[i]);
}
}
return skippedWords.toArray(new Word[0]);
}
float getProgress() {
return (float) (this.eWordIndex+1) / this.eWords.length;
}
}
| Add a TODO to WordCramengine
| src/wordcram/WordCramEngine.java | Add a TODO to WordCramengine | <ide><path>rc/wordcram/WordCramEngine.java
<ide> private RenderOptions renderOptions;
<ide> private Observer observer;
<ide>
<add> // TODO Damn, really need to break down that list of arguments.
<ide> WordCramEngine(WordRenderer renderer, Word[] words, WordFonter fonter, WordSizer sizer, WordColorer colorer, WordAngler angler, WordPlacer placer, WordNudger nudger, WordShaper shaper, BBTreeBuilder bbTreeBuilder, RenderOptions renderOptions, Observer observer) {
<ide> this.renderer = renderer;
<ide> |
|
Java | artistic-2.0 | 25f8992b23ef6844480543cfa67bb7359df70112 | 0 | TheArchives/Painter | package com.archivesmc.painter.listeners;
import com.archivesmc.painter.Painter;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.BlockState;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.ItemStack;
import java.util.HashMap;
import java.util.Map;
public class PlayerInteractListener implements Listener {
Painter plugin;
public PlayerInteractListener(Painter plugin) {
this.plugin = plugin;
}
@EventHandler(priority = EventPriority.HIGHEST)
public void onPlayerInteractEvent(PlayerInteractEvent event) {
// if(! event.isCancelled()) {
Player player = event.getPlayer();
if (this.plugin.range_painters.contains(player.getUniqueId()) &&
event.getAction() == Action.LEFT_CLICK_AIR
) {
this.plugin.getLogger().info("Event!");
if (! this.plugin.permissions.has(player, "painter.replace.range")) {
this.plugin.range_painters.remove(player.getUniqueId());
Map<String, String> args = new HashMap<>();
args.put("permission", "painter.replace.range");
args.put("name", player.getName());
this.plugin.sendMessage(player, "range_replace_perm_lost", args);
return;
}
ItemStack items = player.getItemInHand();
Material heldMat = items.getType();
if (heldMat.isBlock()) {
this.plugin.getLogger().info("It's a block!");
Block block = player.getTargetBlock(null, 100);
BlockState oldBlockState = block.getState();
block.setType(heldMat);
block.setData(items.getData().getData());
event.setCancelled(true);
// Log it if it's being logged
this.plugin.blockPainted(player, oldBlockState, block.getState(), block);
}
}
// }
}
}
| src/main/java/com/archivesmc/painter/listeners/PlayerInteractListener.java | package com.archivesmc.painter.listeners;
import com.archivesmc.painter.Painter;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.BlockState;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.ItemStack;
import java.util.HashMap;
import java.util.Map;
public class PlayerInteractListener implements Listener {
Painter plugin;
public PlayerInteractListener(Painter plugin) {
this.plugin = plugin;
}
@EventHandler(priority = EventPriority.HIGHEST)
public void onPlayerInteractEvent(PlayerInteractEvent event) {
if(! event.isCancelled()) {
Player player = event.getPlayer();
if (this.plugin.range_painters.contains(player.getUniqueId())
&& event.getAction() == Action.LEFT_CLICK_AIR) {
if (! this.plugin.permissions.has(player, "painter.replace.range")) {
this.plugin.range_painters.remove(player.getUniqueId());
Map<String, String> args = new HashMap<>();
args.put("permission", "painter.replace.range");
args.put("name", player.getName());
this.plugin.sendMessage(player, "range_replace_perm_lost", args);
return;
}
ItemStack items = player.getItemInHand();
Material heldMat = items.getType();
if (heldMat.isBlock()) {
Block block = player.getTargetBlock(null, 100);
BlockState oldBlockState = block.getState();
block.setType(heldMat);
block.setData(items.getData().getData());
event.setCancelled(true);
// Log it if it's being logged
this.plugin.blockPainted(player, oldBlockState, block.getState(), block);
}
}
}
}
}
| PlayerInteract is cancelled by default. Please note that ranged replace won't respect protection plugins for now.
| src/main/java/com/archivesmc/painter/listeners/PlayerInteractListener.java | PlayerInteract is cancelled by default. Please note that ranged replace won't respect protection plugins for now. | <ide><path>rc/main/java/com/archivesmc/painter/listeners/PlayerInteractListener.java
<ide>
<ide> @EventHandler(priority = EventPriority.HIGHEST)
<ide> public void onPlayerInteractEvent(PlayerInteractEvent event) {
<del> if(! event.isCancelled()) {
<add>// if(! event.isCancelled()) {
<ide> Player player = event.getPlayer();
<ide>
<del> if (this.plugin.range_painters.contains(player.getUniqueId())
<del> && event.getAction() == Action.LEFT_CLICK_AIR) {
<add> if (this.plugin.range_painters.contains(player.getUniqueId()) &&
<add> event.getAction() == Action.LEFT_CLICK_AIR
<add> ) {
<add> this.plugin.getLogger().info("Event!");
<ide> if (! this.plugin.permissions.has(player, "painter.replace.range")) {
<ide> this.plugin.range_painters.remove(player.getUniqueId());
<ide>
<ide> Material heldMat = items.getType();
<ide>
<ide> if (heldMat.isBlock()) {
<add> this.plugin.getLogger().info("It's a block!");
<ide> Block block = player.getTargetBlock(null, 100);
<ide> BlockState oldBlockState = block.getState();
<ide>
<ide> this.plugin.blockPainted(player, oldBlockState, block.getState(), block);
<ide> }
<ide> }
<del> }
<add>// }
<ide> }
<ide> } |
|
Java | apache-2.0 | 55d654484c31b628c101bc2c43a66fd7f57918dd | 0 | jagguli/intellij-community,apixandru/intellij-community,apixandru/intellij-community,allotria/intellij-community,consulo/consulo,amith01994/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,izonder/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,joewalnes/idea-community,idea4bsd/idea4bsd,blademainer/intellij-community,da1z/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,da1z/intellij-community,robovm/robovm-studio,robovm/robovm-studio,TangHao1987/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,samthor/intellij-community,izonder/intellij-community,asedunov/intellij-community,allotria/intellij-community,hurricup/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,vladmm/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,da1z/intellij-community,petteyg/intellij-community,slisson/intellij-community,kool79/intellij-community,vladmm/intellij-community,fitermay/intellij-community,consulo/consulo,hurricup/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,allotria/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,semonte/intellij-community,da1z/intellij-community,supersven/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,caot/intellij-community,jexp/idea2,ernestp/consulo,petteyg/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,holmes/intellij-community,blademainer/intellij-community,samthor/intellij-community,MER-GROUP/intellij-c
ommunity,SerCeMan/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,allotria/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,jexp/idea2,salguarnieri/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,joewalnes/idea-community,semonte/intellij-community,tmpgit/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,kdwink/intellij-community,allotria/intellij-community,slisson/intellij-community,kool79/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,apixandru/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,izonder/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,da1z/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,xfournet/intellij-community,signed/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,signed/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,da1z/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,retomerz/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,slisson/intellij-community,clumsy/intellij-community,xfourne
t/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,akosyakov/intellij-community,signed/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,samthor/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,semonte/intellij-community,holmes/intellij-community,asedunov/intellij-community,holmes/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,jexp/idea2,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,holmes/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,ernestp/consulo,mglukhikh/intellij-community,ryano144/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,joewalnes/idea-community,joewalnes/idea-community,clumsy/intellij-community,suncycheng/intellij-community,da1z/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,signed/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,supersven/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,samthor/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,caot/intellij-community,ol-loginov/intellij-community,caot/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,clumsy/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,caot/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,signed/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,Mi
chaelNedzelsky/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,semonte/intellij-community,ernestp/consulo,amith01994/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,allotria/intellij-community,clumsy/intellij-community,consulo/consulo,ibinti/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,holmes/intellij-community,slisson/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,kool79/intellij-community,izonder/intellij-community,caot/intellij-community,kool79/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,apixandru/intellij-community,consulo/consulo,semonte/intellij-community,da1z/intellij-community,retomerz/intellij-community,ibinti/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,robovm/robovm-studio,dslomov/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,semonte/intellij-community,blademainer/intellij-community,dslomov/intellij-community,ryano144/intellij-community,caot/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,consulo/consulo,robovm/robovm-studio,retomerz/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,joewalnes/idea-community,retomerz/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,signed/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,FHannes/intellij-community,jexp/idea2,diorcety/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,dslomov/intellij-community,amith01994/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,xfournet/intellij-community,holmes/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,joewalnes/idea-community,ryano144/intellij-community,amith01994/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,caot/in
tellij-community,ernestp/consulo,fengbaicanhe/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,hurricup/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,dslomov/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,jexp/idea2,caot/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,semonte/intellij-community,da1z/intellij-community,izonder/intellij-community,suncycheng/intellij-community,ernestp/consulo,blademainer/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,jexp/idea2,kool79/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,adedayo/intellij-community,signed/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,kool79/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,Distrotech/intellij-community,supersven/intellij-community,tmpgit/intellij-community,samthor/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,amith01994/intellij-community,jagguli/intellij-community,kdwink/intellij-community,samthor/intellij-community,amith01994/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,signed/intellij-community,caot/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,kool79/intellij-community,retomerz/intellij-community,petteyg/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,jexp/idea2,signed/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,ryano144/intellij-community,jexp/idea2,amith01994/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,amith01994/intellij
-community,xfournet/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,blademainer/intellij-community,FHannes/intellij-community,supersven/intellij-community,signed/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,ibinti/intellij-community,fitermay/intellij-community,retomerz/intellij-community,holmes/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,ernestp/consulo,ibinti/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,joewalnes/idea-community,petteyg/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,caot/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,suncycheng/intellij-community,supersven/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,FHannes/intellij-community,caot/intellij-community,samthor/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,fitermay/intellij-community,clumsy/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,da1z/intellij-community,FHannes/intellij-community,holmes/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,fitermay/intellij-community,signed/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,vladmm/intellij-community,petteyg/intellij-community,dslomov/intellij-community,ryano144/intellij-community,consulo/consulo,petteyg/intellij-community,amith01994/intellij-community,ibinti/intellij-community,FHannes/intellij-community,jagguli/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,izonder/intellij-community,petteyg/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,allotria/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,slisson/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,samthor/intellij-community,allotria/intellij-community,samthor/intellij-community,diorcety/intellij-community,clumsy/intellij-community,izonder/intellij-community,petteyg/intellij-community,kdwink/intellij-community,adedayo/intellij-community,MER-GROUP
/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,FHannes/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,kdwink/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community | /*
* Copyright 2000-2007 JetBrains s.r.o.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.blocks;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.*;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
import org.jetbrains.plugins.groovy.lang.psi.*;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.GrTopStatement;
import org.jetbrains.plugins.groovy.lang.psi.impl.*;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.params.GrParameterListImpl;
import org.jetbrains.plugins.groovy.lang.resolve.MethodTypeInferencer;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;
import org.jetbrains.plugins.groovy.lang.resolve.processors.ResolverProcessor;
/**
* @author ilyas
*/
public class GrClosableBlockImpl extends GrBlockImpl implements GrClosableBlock {
private static final Logger LOG = Logger.getInstance("org.jetbrains.plugins.groovy.lang.psi.impl.statements.blocks.GrClosableBlockImpl");
private GrParameter mySyntheticItParameter;
private GrVariable myOwner;
private static final String SYNTHETIC_PARAMETER_NAME = "it";
private static final String OWNER_NAME = "owner";
public GrClosableBlockImpl(@NotNull ASTNode node) {
super(node);
}
public void accept(GroovyElementVisitor visitor) {
visitor.visitClosure(this);
}
public boolean processDeclarations(@NotNull PsiScopeProcessor processor, @NotNull PsiSubstitutor substitutor, PsiElement lastParent, @NotNull PsiElement place) {
if (processor instanceof ResolverProcessor) ((ResolverProcessor) processor).setCurrentFileResolveContext(this);
try {
if (!super.processDeclarations(processor, substitutor, lastParent, place)) return false;
for (final GrParameter parameter : getParameters()) {
if (!ResolveUtil.processElement(processor, parameter)) return false;
}
if (!ResolveUtil.processElement(processor, getOwner())) return false;
final PsiClass closureClass = getManager().findClass(GROOVY_LANG_CLOSURE, getResolveScope());
if (closureClass != null && !closureClass.processDeclarations(processor, substitutor, lastParent, place)) return false;
return true;
} finally {
if (processor instanceof ResolverProcessor) ((ResolverProcessor) processor).setCurrentFileResolveContext(null);
}
}
public String toString() {
return "Closable block";
}
public GrParameter[] getParameters() {
if (hasParametersSection()) {
GrParameterListImpl parameterList = getParameterList();
if (parameterList != null) {
return parameterList.getParameters();
}
return GrParameter.EMPTY_ARRAY;
}
return new GrParameter[]{getSyntheticItParameter()};
}
public GrParameterListImpl getParameterList() {
return findChildByClass(GrParameterListImpl.class);
}
public boolean hasParametersSection() {
return findChildByType(GroovyElementTypes.mCLOSABLE_BLOCK_OP) != null;
}
public PsiType getType() {
return GrClosureType.create(this);
}
@Nullable
public PsiType getNominalType() {
return getType();
}
public void subtreeChanged() {
super.subtreeChanged();
mySyntheticItParameter = null;
}
public GrParameter getSyntheticItParameter() {
if (mySyntheticItParameter == null) {
try {
mySyntheticItParameter = GroovyElementFactory.getInstance(getProject()).createParameter(SYNTHETIC_PARAMETER_NAME, null, this);
} catch (IncorrectOperationException e) {
LOG.error(e);
}
}
return mySyntheticItParameter;
}
private GrVariable getOwner() {
if (myOwner == null) {
final GroovyPsiElement context = PsiTreeUtil.getParentOfType(this, GrTypeDefinition.class, GrClosableBlock.class, GroovyFile.class);
final PsiElementFactory factory = getManager().getElementFactory();
PsiType type = null;
if (context instanceof GrTypeDefinition) {
type = factory.createType((PsiClass) context);
} else if (context instanceof GrClosableBlock) {
type = GrClosureType.create((GrClosableBlock) context);
} else if (context instanceof GroovyFile) {
final PsiClass scriptClass = ((GroovyFile) context).getScriptClass();
if (scriptClass != null) type = factory.createType(scriptClass);
}
if (type == null) {
type = factory.createTypeByFQClassName("java.lang.Object", getResolveScope());
}
myOwner = GroovyElementFactory.getInstance(getProject()).createVariableDeclaration(null, OWNER_NAME, null, type).getVariables()[0];
}
return myOwner;
}
public GrExpression replaceWithExpression(@NotNull GrExpression newExpr, boolean removeUnnecessaryParentheses) throws IncorrectOperationException {
return PsiImplUtil.replaceExpression(this, newExpr, removeUnnecessaryParentheses);
}
private static Function<GrClosableBlock, PsiType> ourTypesCalculator = new Function<GrClosableBlock, PsiType>() {
public PsiType fun(GrClosableBlock block) {
return GroovyPsiManager.getInstance(block.getProject()).inferType(block, new MethodTypeInferencer(block));
}
};
public @Nullable PsiType getReturnType(){
if (GroovyPsiManager.getInstance(getProject()).isTypeBeingInferred(this)) {
return null;
}
return GroovyPsiManager.getInstance(getProject()).getType(this, ourTypesCalculator);
}
} | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/impl/statements/blocks/GrClosableBlockImpl.java | /*
* Copyright 2000-2007 JetBrains s.r.o.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.blocks;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.*;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.GrTopStatement;
import org.jetbrains.plugins.groovy.lang.psi.impl.*;
import org.jetbrains.plugins.groovy.lang.psi.impl.statements.params.GrParameterListImpl;
import org.jetbrains.plugins.groovy.lang.resolve.MethodTypeInferencer;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;
import org.jetbrains.plugins.groovy.lang.resolve.processors.ResolverProcessor;
/**
* @author ilyas
*/
public class GrClosableBlockImpl extends GrBlockImpl implements GrClosableBlock {
private static final Logger LOG = Logger.getInstance("org.jetbrains.plugins.groovy.lang.psi.impl.statements.blocks.GrClosableBlockImpl");
private GrParameter mySyntheticItParameter;
private GrVariable myOwner;
private static final String SYNTHETIC_PARAMETER_NAME = "it";
private static final String OWNER_NAME = "owner";
public GrClosableBlockImpl(@NotNull ASTNode node) {
super(node);
}
public void accept(GroovyElementVisitor visitor) {
visitor.visitClosure(this);
}
public boolean processDeclarations(@NotNull PsiScopeProcessor processor, @NotNull PsiSubstitutor substitutor, PsiElement lastParent, @NotNull PsiElement place) {
if (processor instanceof ResolverProcessor) ((ResolverProcessor) processor).setCurrentFileResolveContext(this);
try {
if (!super.processDeclarations(processor, substitutor, lastParent, place)) return false;
for (final GrParameter parameter : getParameters()) {
if (!ResolveUtil.processElement(processor, parameter)) return false;
}
if (!ResolveUtil.processElement(processor, getOwner())) return false;
final PsiClass closureClass = getManager().findClass(GROOVY_LANG_CLOSURE, getResolveScope());
if (closureClass != null && !closureClass.processDeclarations(processor, substitutor, lastParent, place)) return false;
return true;
} finally {
if (processor instanceof ResolverProcessor) ((ResolverProcessor) processor).setCurrentFileResolveContext(null);
}
}
public String toString() {
return "Closable block";
}
public GrParameter[] getParameters() {
if (hasParametersSection()) {
GrParameterListImpl parameterList = getParameterList();
if (parameterList != null) {
return parameterList.getParameters();
}
return GrParameter.EMPTY_ARRAY;
}
return new GrParameter[]{getSyntheticItParameter()};
}
public GrParameterListImpl getParameterList() {
return findChildByClass(GrParameterListImpl.class);
}
public boolean hasParametersSection() {
return findChildByType(GroovyElementTypes.mCLOSABLE_BLOCK_OP) != null;
}
public PsiType getType() {
return GrClosureType.create(this);
}
@Nullable
public PsiType getNominalType() {
return getType();
}
public void subtreeChanged() {
super.subtreeChanged();
mySyntheticItParameter = null;
}
public GrParameter getSyntheticItParameter() {
if (mySyntheticItParameter == null) {
try {
mySyntheticItParameter = GroovyElementFactory.getInstance(getProject()).createParameter(SYNTHETIC_PARAMETER_NAME, null, this);
} catch (IncorrectOperationException e) {
LOG.error(e);
}
}
return mySyntheticItParameter;
}
private GrVariable getOwner() {
if (myOwner == null) {
final GrTopStatement context = PsiTreeUtil.getParentOfType(this, GrTypeDefinition.class, GrClosableBlock.class);
final PsiElementFactory factory = getManager().getElementFactory();
PsiType type;
if (context instanceof GrTypeDefinition) {
type = factory.createType((PsiClass) context);
} else if (context instanceof GrClosableBlock) {
type = GrClosureType.create((GrClosableBlock) context);
} else {
type = factory.createTypeByFQClassName("java.lang.Object", getResolveScope());
}
myOwner = GroovyElementFactory.getInstance(getProject()).createVariableDeclaration(null, OWNER_NAME, null, type).getVariables()[0];
}
return myOwner;
}
public GrExpression replaceWithExpression(@NotNull GrExpression newExpr, boolean removeUnnecessaryParentheses) throws IncorrectOperationException {
return PsiImplUtil.replaceExpression(this, newExpr, removeUnnecessaryParentheses);
}
private static Function<GrClosableBlock, PsiType> ourTypesCalculator = new Function<GrClosableBlock, PsiType>() {
public PsiType fun(GrClosableBlock block) {
return GroovyPsiManager.getInstance(block.getProject()).inferType(block, new MethodTypeInferencer(block));
}
};
public @Nullable PsiType getReturnType(){
if (GroovyPsiManager.getInstance(getProject()).isTypeBeingInferred(this)) {
return null;
}
return GroovyPsiManager.getInstance(getProject()).getType(this, ourTypesCalculator);
}
} | correct 'owner' for script scoped closures | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/impl/statements/blocks/GrClosableBlockImpl.java | correct 'owner' for script scoped closures | <ide><path>lugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/impl/statements/blocks/GrClosableBlockImpl.java
<ide> import org.jetbrains.annotations.NotNull;
<ide> import org.jetbrains.annotations.Nullable;
<ide> import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
<del>import org.jetbrains.plugins.groovy.lang.psi.GroovyElementFactory;
<del>import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
<add>import org.jetbrains.plugins.groovy.lang.psi.*;
<ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
<del>import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
<ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrClosableBlock;
<ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
<ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
<add>import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition;
<ide> import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.GrTopStatement;
<ide> import org.jetbrains.plugins.groovy.lang.psi.impl.*;
<ide> import org.jetbrains.plugins.groovy.lang.psi.impl.statements.params.GrParameterListImpl;
<ide>
<ide> private GrVariable getOwner() {
<ide> if (myOwner == null) {
<del> final GrTopStatement context = PsiTreeUtil.getParentOfType(this, GrTypeDefinition.class, GrClosableBlock.class);
<add> final GroovyPsiElement context = PsiTreeUtil.getParentOfType(this, GrTypeDefinition.class, GrClosableBlock.class, GroovyFile.class);
<ide> final PsiElementFactory factory = getManager().getElementFactory();
<del> PsiType type;
<add> PsiType type = null;
<ide> if (context instanceof GrTypeDefinition) {
<ide> type = factory.createType((PsiClass) context);
<ide> } else if (context instanceof GrClosableBlock) {
<ide> type = GrClosureType.create((GrClosableBlock) context);
<del> } else {
<add> } else if (context instanceof GroovyFile) {
<add> final PsiClass scriptClass = ((GroovyFile) context).getScriptClass();
<add> if (scriptClass != null) type = factory.createType(scriptClass);
<add> }
<add> if (type == null) {
<ide> type = factory.createTypeByFQClassName("java.lang.Object", getResolveScope());
<ide> }
<add>
<ide> myOwner = GroovyElementFactory.getInstance(getProject()).createVariableDeclaration(null, OWNER_NAME, null, type).getVariables()[0];
<ide> }
<ide> |
|
Java | mit | e53b67a21d3004d55427bca01540dc093aab3663 | 0 | eaglesakura/simple-utils | package com.eaglesakura.util;
import java.lang.reflect.Method;
import java.util.UUID;
/**
* Utility for generating random values
*/
public class RandomUtil {
/**
* Generates a random boolean value
*/
public static boolean randBool() {
return randInt8() % 2 == 0;
}
/**
* Generates a 1-byte integer
*/
public static byte randInt8() {
return (byte) ((Math.random() * 255) - 128);
}
/**
* Generates a 2-byte integer
*/
public static short randInt16() {
return (short) (Math.random() * (double) 0x0000FFFF);
}
/**
* Generates a 4-byte integer
*/
public static int randInt32() {
return (int) (Math.random() * (double) (0x00000000FFFFFFFFL));
}
/**
* Generates an 8-byte integer
*/
public static long randInt64() {
return ((long) randInt32() & 0x00000000FFFFFFFFL) << 32 | ((long) randInt32() & 0x00000000FFFFFFFFL);
}
/**
* Generates an integer from 0 to 127
*/
public static byte randUInt8() {
return (byte) (Math.random() * 127);
}
/**
* Generates a non-negative 2-byte integer
*/
public static short randUInt16() {
return (short) ((int) randInt16() & 0x00007FFFF);
}
/**
* Generates a non-negative 4-byte integer
*/
public static int randUInt32() {
return randInt32() & 0x7FFFFFFF;
}
/**
* Generates a non-negative 8-byte integer
*/
public static long randUInt64() {
return randInt64() & 0x7FFFFFFFFFFFFFFFL;
}
/**
* Generates a random number from 0.0 to 1.0
*/
public static float randFloat() {
return (float) Math.random();
}
/**
* Generates a random string
*/
public static String randString() {
return UUID.randomUUID().toString();
}
/**
* Generates a reasonably long random string
*/
public static String randLargeString() {
StringBuffer result = new StringBuffer();
for (int i = 0; i < 256; ++i) {
result.append(randString());
result.append("-");
}
return result.toString();
}
/**
* Generates a byte array of random length and contents
*/
public static byte[] randBytes() {
byte[] buffer = new byte[32 + randUInt8()];
for (int i = 0; i < buffer.length; ++i) {
buffer[i] = randInt8();
}
return buffer;
}
/**
* Gets a random enum value
*/
public static <T extends Enum> T randEnum(Class<T> clazz) {
try {
Method valuesMethod = clazz.getMethod("values");
T[] values = (T[]) valuesMethod.invoke(clazz);
return values[randUInt8() % values.length];
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
/**
* Gets a random enum value, possibly returning null
*/
public static <T extends Enum> T randEnumWithNull(Class<T> clazz) {
try {
Method valuesMethod = clazz.getMethod("values");
T[] values = (T[]) valuesMethod.invoke(clazz);
if (randUInt8() % (values.length + 1) == 0) {
return null;
} else {
return values[randUInt8() % values.length];
}
} catch (Exception e) {
return null;
}
}
}
| src/main/java/com/eaglesakura/util/RandomUtil.java | package com.eaglesakura.util;
import java.util.UUID;
/**
* Utility for generating random values
*/
public class RandomUtil {
/**
* Generates a random boolean value
*/
public static boolean randBool() {
return randInt8() % 2 == 0;
}
/**
* Generates a 1-byte integer
*/
public static byte randInt8() {
return (byte) ((Math.random() * 255) - 128);
}
/**
* Generates a 2-byte integer
*/
public static short randInt16() {
return (short) (Math.random() * (double) 0x0000FFFF);
}
/**
* Generates a 4-byte integer
*/
public static int randInt32() {
return (int) (Math.random() * (double) (0x00000000FFFFFFFFL));
}
/**
* Generates an 8-byte integer
*/
public static long randInt64() {
return ((long) randInt32() & 0x00000000FFFFFFFFL) << 32 | ((long) randInt32() & 0x00000000FFFFFFFFL);
}
/**
* Generates an integer from 0 to 127
*/
public static byte randUInt8() {
return (byte) (Math.random() * 127);
}
/**
* Generates a non-negative 2-byte integer
*/
public static short randUInt16() {
return (short) ((int) randInt16() & 0x00007FFFF);
}
/**
* Generates a non-negative 4-byte integer
*/
public static int randUInt32() {
return randInt32() & 0x7FFFFFFF;
}
/**
* Generates a non-negative 8-byte integer
*/
public static long randUInt64() {
return randInt64() & 0x7FFFFFFFFFFFFFFFL;
}
/**
* Generates a random number from 0.0 to 1.0
*/
public static float randFloat() {
return (float) Math.random();
}
/**
* Generates a random string
*/
public static String randString() {
return UUID.randomUUID().toString();
}
/**
* Generates a reasonably long random string
*/
public static String randLargeString() {
StringBuffer result = new StringBuffer();
for (int i = 0; i < 256; ++i) {
result.append(randString());
result.append("-");
}
return result.toString();
}
/**
* Generates a byte array of random length and contents
*/
public static byte[] randBytes() {
byte[] buffer = new byte[32 + randUInt8()];
for (int i = 0; i < buffer.length; ++i) {
buffer[i] = randInt8();
}
return buffer;
}
}
 | Add enum generation
 | src/main/java/com/eaglesakura/util/RandomUtil.java | Add enum generation | <ide><path>rc/main/java/com/eaglesakura/util/RandomUtil.java
<ide> package com.eaglesakura.util;
<ide>
<add>import java.lang.reflect.Method;
<ide> import java.util.UUID;
<ide>
<ide> /**
<ide> }
<ide> return buffer;
<ide> }
<add>
<add>
<add> /**
<add> * Gets a random enum value
<add> */
<add> public static <T extends Enum> T randEnum(Class<T> clazz) {
<add> try {
<add> Method valuesMethod = clazz.getMethod("values");
<add> T[] values = (T[]) valuesMethod.invoke(clazz);
<add> return values[randUInt8() % values.length];
<add> } catch (Exception e) {
<add> throw new IllegalStateException(e);
<add> }
<add> }
<add>
<add> /**
<add> * Gets a random enum value, possibly returning null
<add> */
<add> public static <T extends Enum> T randEnumWithNull(Class<T> clazz) {
<add> try {
<add> Method valuesMethod = clazz.getMethod("values");
<add> T[] values = (T[]) valuesMethod.invoke(clazz);
<add> if (randUInt8() % (values.length + 1) == 0) {
<add> return null;
<add> } else {
<add> return values[randUInt8() % values.length];
<add> }
<add> } catch (Exception e) {
<add> return null;
<add> }
<add> }
<ide> } |
|
Java | mit | fcad40d993b5388474ff076d06ff1b6037c0089c | 0 | TeamWizardry/TMT-Refraction | package com.teamwizardry.refraction.api.beam;
import com.teamwizardry.librarianlib.client.fx.particle.ParticleBuilder;
import com.teamwizardry.librarianlib.client.fx.particle.ParticleSpawner;
import com.teamwizardry.librarianlib.client.fx.particle.functions.InterpFadeInOut;
import com.teamwizardry.librarianlib.common.util.math.interpolate.StaticInterp;
import com.teamwizardry.refraction.api.ConfigValues;
import com.teamwizardry.refraction.api.Constants;
import com.teamwizardry.refraction.api.Utils;
import com.teamwizardry.refraction.api.beam.Effect.EffectType;
import com.teamwizardry.refraction.api.beam.modes.BeamMode;
import com.teamwizardry.refraction.api.beam.modes.BeamModeRegistry;
import com.teamwizardry.refraction.api.beam.modes.ModeEffect;
import com.teamwizardry.refraction.api.raytrace.EntityTrace;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.common.util.INBTSerializable;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.eventhandler.Event;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
public class Beam implements INBTSerializable<NBTTagCompound> {
/**
* The mode of the beam
*/
@NotNull
public BeamMode mode = BeamModeRegistry.NONE;
/**
* The initial position the beam comes from.
*/
public Vec3d initLoc;
/**
* The vector that specifies the inclination of the beam.
* Set it to your final location and it'll work.
*/
public Vec3d slope;
/**
* The destination of the beam. Don't touch this, just set the slope to the final loc
* and let this class handle it unless you know what you're doing.
*/
public Vec3d finalLoc;
/**
* The color of the beam including its alpha.
*/
@NotNull
public Color color = Color.WHITE;
/**
* The world the beam will spawn in.
*/
@NotNull
public World world;
/**
* The effect the beam will produce across itself or at its destination.
*/
@Nullable
public Effect effect;
/**
* If true, the beam will phase through entities.
*/
public boolean ignoreEntities = false;
/**
* The raytrace produced from the beam after it spawns.
* Contains some neat methods you can use.
*/
public RayTraceResult trace;
/**
* The range of the raytrace. Will default to BEAM_RANGE unless otherwise specified.
*/
public double range = ConfigValues.BEAM_RANGE;
// /**
// * A unique identifier for a beam. Used for uniqueness checks.
// */
// @NotNull
// public UUID uuid;
/**
* The number of times this beam has bounced or been reflected.
*/
public int bouncedTimes = 0;
/**
* The amount of times this beam is allowed to bounce or reflect.
*/
public int allowedBounceTimes = ConfigValues.BEAM_BOUNCE_LIMIT;
/**
* Will spawn a particle at either the beginning or at the end of the beam if any are enabled.
*/
public boolean enableParticleBeginning = false, enableParticleEnd;
/**
* The uuid of the entity that will not be affected by the beam.
*/
@Nullable
public UUID uuidToSkip;
/**
* The person theoretically casting the beam.
*/
@Nullable
public Entity caster;
/**
* The custom name of the beam
*/
public String customName = "";
/**
* The physical particle that will spawn at the beginning or end
*/
@SideOnly(Side.CLIENT)
private ParticleBuilder particle1, particle2;
public Beam(@NotNull World world, @NotNull Vec3d initLoc, @NotNull Vec3d slope, @NotNull Color color) {
this.world = world;
this.initLoc = initLoc;
this.slope = slope;
this.finalLoc = slope.normalize().scale(128).add(initLoc);
this.color = color;
Utils.HANDLER.runIfClient(() -> {
particle1 = new ParticleBuilder(3);
particle1.setRender(new ResourceLocation(Constants.MOD_ID, "particles/star"));
particle1.disableRandom();
particle1.disableMotionCalculation();
particle1.setAlphaFunction(new InterpFadeInOut(0f, 1f));
particle1.setScale(ThreadLocalRandom.current().nextFloat() * 2);
particle1.setColor(new Color(color.getRed(), color.getGreen(), color.getBlue(), 10));
particle2 = new ParticleBuilder(ThreadLocalRandom.current().nextInt(20, 100));
particle2.setRender(new ResourceLocation(Constants.MOD_ID, "particles/lens_flare_1"));
particle2.disableRandom();
particle2.disableMotionCalculation();
particle2.setColor(new Color(color.getRed(), color.getGreen(), color.getBlue(), ThreadLocalRandom.current().nextInt(10, 15)));
particle2.setAlphaFunction(new InterpFadeInOut((float) ThreadLocalRandom.current().nextDouble(0, 1), (float) ThreadLocalRandom.current().nextDouble(0, 1)));
particle2.setScale((float) ThreadLocalRandom.current().nextDouble(0.5, 2.5));
});
}
public Beam(World world, double initX, double initY, double initZ, double slopeX, double slopeY, double slopeZ, Color color) {
this(world, new Vec3d(initX, initY, initZ), new Vec3d(slopeX, slopeY, slopeZ), color);
}
public Beam(World world, double initX, double initY, double initZ, double slopeX, double slopeY, double slopeZ, float red, float green, float blue, float alpha) {
this(world, initX, initY, initZ, slopeX, slopeY, slopeZ, new Color(red, green, blue, alpha));
}
public Beam(NBTTagCompound compound) {
deserializeNBT(compound);
}
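    /**
     * Checks whether this beam matches the given beam on color, slope, origin, particle
     * flags, entity handling, bounce counts, range and mode.
     */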
public boolean doBeamsMatch(Beam beam) {
return beam.color.getRGB() == color.getRGB()
&& beam.slope.xCoord == slope.xCoord
&& beam.slope.yCoord == slope.yCoord
&& beam.slope.zCoord == slope.zCoord
&& beam.initLoc.xCoord == initLoc.xCoord
&& beam.initLoc.yCoord == initLoc.yCoord
&& beam.initLoc.zCoord == initLoc.zCoord
&& beam.enableParticleEnd == enableParticleEnd
&& beam.enableParticleBeginning == enableParticleBeginning
&& beam.ignoreEntities == ignoreEntities
&& beam.allowedBounceTimes == allowedBounceTimes
&& beam.bouncedTimes == bouncedTimes
&& beam.range == range
&& beam.mode.equals(mode);
}
/**
     * Will create a beam that's exactly like this one.
*
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam() {
return createSimilarBeam(initLoc, finalLoc);
}
/**
     * Will create a beam that's exactly like this one, except with the given color.
*
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Color color) {
return createSimilarBeam(initLoc, finalLoc, color);
}
/**
* Will create a similar beam that starts from the position this beam ended at
     * and will set its slope to the one specified. So it's a new beam from the position
* you last hit to the new one you specify.
*
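     * <p>Illustrative chain (this mirrors the reflection call made later in {@link #spawn()};
     * the entity variable is only an example):</p>
     * <pre>{@code
     * createSimilarBeam(entity.getLook(1)).setUUIDToSkip(entity.getUniqueID()).enableParticleBeginning().spawn();
     * }</pre>
     *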
* @param slope The slope or destination or final location the beam will point to.
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Vec3d slope) {
return createSimilarBeam(finalLoc, slope);
}
/**
* Will create a similar beam that starts and ends in the positions you specify
*
* @param init The initial location or origin to spawn the beam from.
* @param dir The direction or slope or final destination or location the beam will point to.
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Vec3d init, Vec3d dir) {
return createSimilarBeam(init, dir, color);
}
/**
* Will create a similar beam that starts and ends in the positions you specify, with a custom color.
*
* @param init The initial location or origin to spawn the beam from.
* @param dir The direction or slope or final destination or location the beam will point to.
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Vec3d init, Vec3d dir, Color color) {
return new Beam(world, init, dir, color)
.setIgnoreEntities(ignoreEntities)
.setAllowedBounceTimes(allowedBounceTimes)
.setBouncedTimes(bouncedTimes)
.incrementBouncedTimes()
.setMode(mode)
.setRange(range)
.setCaster(caster);
}
/**
* Will change the mode of the beam
*
* @param mode Defines the new mode this beam will be.
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setMode(@NotNull BeamMode mode) {
this.mode = mode;
return this;
}
/**
* Will change the name of the beam.
*
* @param name Defines the custom name of the beam.
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setName(@NotNull String name) {
this.customName = name;
return this;
}
/**
* Will set the theoretical caster of the beam.
*
* @param caster Defines the entity casting the beam.
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setCaster(@Nullable Entity caster) {
this.caster = caster;
return this;
}
/**
* The RayTrace will skip the first time it hits an entity with this uuid
*
* @param uuidToSkip The uuid to skip the first time it's detected
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setUUIDToSkip(UUID uuidToSkip) {
this.uuidToSkip = uuidToSkip;
return this;
}
/**
* Will create a tiny particle at the initLoc of the beam
*
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam enableParticleBeginning() {
this.enableParticleBeginning = true;
return this;
}
/**
* Will create a tiny particle at the end of the beam
*
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam enableParticleEnd() {
this.enableParticleEnd = true;
return this;
}
/**
* Will set the amount of times this beam has already bounced or been reflected
*
* @param bouncedTimes The amount of times this beam has bounced or been reflected
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setBouncedTimes(int bouncedTimes) {
this.bouncedTimes = bouncedTimes;
return this;
}
/**
* Will set the amount of times this beam will be allowed to bounce or reflect.
*
* @param allowedBounceTimes The amount of times this beam is allowed to bounce or reflect
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setAllowedBounceTimes(int allowedBounceTimes) {
this.allowedBounceTimes = allowedBounceTimes;
return this;
}
/**
* Will change the slope or destination or final location the beam will point to.
*
* @param slope The final location or destination.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setSlope(@NotNull Vec3d slope) {
this.slope = slope;
this.finalLoc = slope.normalize().scale(128).add(initLoc);
return this;
}
/**
* Will increment the amount of times this beam has bounced or reflected
*
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam incrementBouncedTimes() {
bouncedTimes++;
return this;
}
/**
* Will change the color of the beam with the alpha.
*
* @param color The color of the new beam.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setColor(@NotNull Color color) {
this.color = color;
return this;
}
/**
* If set to true, the beam will phase through entities.
*
     * @param ignoreEntities The boolean that will specify if the beam should phase through entities or not. Default false.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setIgnoreEntities(boolean ignoreEntities) {
this.ignoreEntities = ignoreEntities;
return this;
}
/**
* Will set the beam's new starting position or origin and will continue on towards the slope still specified.
*
     * @param initLoc The new initial location the beam will start from.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setInitLoc(@NotNull Vec3d initLoc) {
this.initLoc = initLoc;
this.finalLoc = slope.normalize().scale(128).add(initLoc);
return this;
}
/**
* Will set the beam's effect if you don't want it to autodetect the effect by itself from the color
* you specified.
*
* @param effect The new effect this beam will produce.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setEffect(@Nullable Effect effect) {
this.effect = effect;
return this;
}
/**
* Will set the range the raytrace will attempt.
*
     * @param range The new range of the beam. Default: ConfigValues.BEAM_RANGE
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setRange(double range) {
this.range = range;
return this;
}
public Beam setUUID(UUID uuid) {
// this.uuid = uuid;
return this;
}
/**
* Will initialize all variables left to prepare before the beam actually spawns.
*
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
private Beam initializeVariables() {
// EFFECT CHECKING //
if (effect == null && mode instanceof ModeEffect) {
Effect tempEffect = EffectTracker.getEffect(this);
if (tempEffect != null) effect = tempEffect;
} else if (effect != null && !(mode instanceof ModeEffect)) effect = null;
// EFFECT CHECKING //
// BEAM PHASING CHECKS //
EntityTrace entityTrace = new EntityTrace(world, initLoc, slope).setUUIDToSkip(uuidToSkip).setRange(range);
        if (ignoreEntities || (effect != null && effect.getType() == EffectType.BEAM)) // If either of these is true, phase the beam
trace = entityTrace.setIgnoreEntities(true).cast();
else trace = entityTrace.setIgnoreEntities(false).cast();
// BEAM PHASING CHECKS //
if (trace != null && trace.hitVec != null) this.finalLoc = trace.hitVec;
return this;
}
/**
* Will spawn the final complete beam.
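     * <p>A minimal usage sketch (hypothetical; the world, origin and direction variables are
     * assumed to be supplied by the caller):</p>
     * <pre>{@code
     * new Beam(world, origin, direction, Color.RED)
     *         .setRange(64)
     *         .enableParticleEnd()
     *         .spawn();
     * }</pre>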
*/
public void spawn() {
if (world.isRemote) return;
if (color.getAlpha() <= 1) return;
if (bouncedTimes > allowedBounceTimes) return;
initializeVariables();
if (trace == null) return;
if (trace.hitVec == null) return;
if (finalLoc == null) return;
// EFFECT HANDLING //
boolean pass = true;
boolean traceCompleted = false;
// Making sure we don't recur //
int tries = 0;
// IBeamHandler handling
while (!traceCompleted && tries < 100) {
tries++;
if (trace == null)
return;
else if (trace.typeOfHit == RayTraceResult.Type.BLOCK) {
BlockPos pos = trace.getBlockPos();
IBlockState state = world.getBlockState(pos);
BeamHitEvent event = new BeamHitEvent(world, this, pos, state);
MinecraftForge.EVENT_BUS.post(event);
if (event.getResult() == Event.Result.DEFAULT) {
traceCompleted = true;
if (state.getBlock() instanceof IBeamHandler) {
traceCompleted = (((IBeamHandler) state.getBlock()).handleBeam(world, pos, this));
pass = false;
}
} else {
traceCompleted = event.getResult() == Event.Result.DENY;
pass = event.getResult() == Event.Result.ALLOW;
}
} else
traceCompleted = trace.typeOfHit != RayTraceResult.Type.ENTITY ||
!MinecraftForge.EVENT_BUS.post(new BeamHitEntityEvent(world, this, trace.entityHit));
if (!traceCompleted) traceCompleted = recast();
}
// Effect handling
if (mode instanceof ModeEffect)
if (effect != null) {
if (effect.getType() == EffectType.BEAM)
EffectTracker.addEffect(world, this);
else if (pass) {
if (effect.getType() == EffectType.SINGLE) {
if (trace.typeOfHit != RayTraceResult.Type.MISS)
EffectTracker.addEffect(world, trace.hitVec, effect);
else if (trace.typeOfHit == RayTraceResult.Type.BLOCK) {
BlockPos pos = trace.getBlockPos();
EffectTracker.addEffect(world, new Vec3d(pos.getX() + 0.5, pos.getY() + 0.5, pos.getZ() + 0.5), effect);
}
}
}
}
// EFFECT HANDLING
// ENTITY REFLECTING
if (trace.typeOfHit == RayTraceResult.Type.ENTITY && trace.entityHit instanceof EntityLivingBase) {
EntityLivingBase entity = (EntityLivingBase) trace.entityHit;
boolean flag = true;
for (ItemStack armor : entity.getArmorInventoryList()) {
if (armor == null) {
flag = false;
break;
}
if (!(armor.getItem() instanceof IReflectiveArmor)) {
flag = false;
break;
}
}
if (flag)
createSimilarBeam(entity.getLook(1)).setUUIDToSkip(entity.getUniqueID()).enableParticleBeginning().spawn();
}
// ENTITY REFLECTING
// Particle packet sender
Utils.HANDLER.fireLaserPacket(this);
// PARTICLES
if (enableParticleBeginning) Utils.HANDLER.runIfClient(() -> {
if (ThreadLocalRandom.current().nextInt(10) == 0)
ParticleSpawner.spawn(particle1, world, new StaticInterp<>(initLoc), 1);
if (ThreadLocalRandom.current().nextInt(100) == 0)
ParticleSpawner.spawn(particle2, world, new StaticInterp<>(initLoc), 1);
});
if (trace.hitVec != null && enableParticleEnd) Utils.HANDLER.runIfClient(() -> {
if (ThreadLocalRandom.current().nextInt(10) == 0)
ParticleSpawner.spawn(particle1, world, new StaticInterp<>(trace.hitVec), 1);
if (ThreadLocalRandom.current().nextInt(100) == 0)
ParticleSpawner.spawn(particle2, world, new StaticInterp<>(trace.hitVec), 1);
});
// PARTICLES
}
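    /**
     * Re-runs the ray trace for this beam when the previous hit did not complete the trace.
     * Returns true when there is no range left to continue, false after updating the trace.
     */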
private boolean recast() {
EntityTrace entityTrace = new EntityTrace(world, this).setUUIDToSkip(uuidToSkip);
if (entityTrace.range <= 0) return true;
if (entityTrace.rayTraceResult != null)
trace = entityTrace.rayTraceResult;
        else if (ignoreEntities || (effect != null && effect.getType() == EffectType.BEAM)) // If either of these is true, phase the beam
trace = entityTrace.setIgnoreEntities(true).cast();
else trace = entityTrace.setIgnoreEntities(false).cast();
if (trace != null && trace.hitVec != null) this.finalLoc = trace.hitVec;
return false;
}
@Override
public boolean equals(Object other) {
return super.equals(other);
}
@Override
public int hashCode() {
return super.hashCode();
}
@Override
public NBTTagCompound serializeNBT() {
NBTTagCompound compound = new NBTTagCompound();
compound.setDouble("init_loc_x", initLoc.xCoord);
compound.setDouble("init_loc_y", initLoc.yCoord);
compound.setDouble("init_loc_z", initLoc.zCoord);
compound.setDouble("slope_x", slope.xCoord);
compound.setDouble("slope_y", slope.yCoord);
compound.setDouble("slope_z", slope.zCoord);
compound.setInteger("color", color.getRGB());
compound.setInteger("world", world.provider.getDimension());
compound.setInteger("bounce_times", bouncedTimes);
compound.setInteger("allowed_bounce_times", allowedBounceTimes);
compound.setDouble("range", range);
compound.setString("name", customName);
compound.setBoolean("ignore_entities", ignoreEntities);
compound.setBoolean("enable_particle_beginning", enableParticleBeginning);
compound.setBoolean("enable_particle_end", enableParticleEnd);
compound.setString("mode", mode.getName());
if (uuidToSkip != null) compound.setUniqueId("uuid_to_skip", uuidToSkip);
return compound;
}
@Override
public void deserializeNBT(NBTTagCompound nbt) {
if (nbt.hasKey("world")) {
            world = FMLCommonHandler.instance().getMinecraftServerInstance().worldServerForDimension(nbt.getInteger("world"));
} else throw new NullPointerException("'world' key not found or missing in deserialized beam object.");
if (nbt.hasKey("init_loc_x") && nbt.hasKey("init_loc_y") && nbt.hasKey("init_loc_z")) {
initLoc = new Vec3d(nbt.getDouble("init_loc_x"), nbt.getDouble("init_loc_y"), nbt.getDouble("init_loc_z"));
} else throw new NullPointerException("'init_loc' key not found or missing in deserialized beam object.");
if (nbt.hasKey("slope_x") && nbt.hasKey("slope_y") && nbt.hasKey("slope_z")) {
slope = new Vec3d(nbt.getDouble("slope_x"), nbt.getDouble("slope_y"), nbt.getDouble("slope_z"));
finalLoc = slope.normalize().scale(128).add(initLoc);
} else throw new NullPointerException("'slope' key not found or missing in deserialized beam object.");
if (nbt.hasKey("color")) {
color = new Color(nbt.getInteger("color"), true);
} else
throw new NullPointerException("'color' or 'color_alpha' keys not found or missing in deserialized beam object.");
if (nbt.hasKey("name")) customName = nbt.getString("name");
if (nbt.hasKey("uuid_to_skip")) uuidToSkip = nbt.getUniqueId("uuid_to_skip");
if (nbt.hasKey("ignore_entities")) ignoreEntities = nbt.getBoolean("ignore_entities");
if (nbt.hasKey("range")) range = nbt.getDouble("range");
if (nbt.hasKey("bounce_times")) bouncedTimes = nbt.getInteger("bounce_times");
if (nbt.hasKey("allowed_bounce_times")) allowedBounceTimes = nbt.getInteger("allowed_bounce_times");
if (nbt.hasKey("enable_particle_beginning"))
enableParticleBeginning = nbt.getBoolean("enable_particle_beginning");
mode = BeamModeRegistry.getMode(nbt.getString("mode"));
if (nbt.hasKey("enable_particle_end")) enableParticleEnd = nbt.getBoolean("enable_particle_end");
}
}
| src/main/java/com/teamwizardry/refraction/api/beam/Beam.java | package com.teamwizardry.refraction.api.beam;
import com.teamwizardry.librarianlib.client.fx.particle.ParticleBuilder;
import com.teamwizardry.librarianlib.client.fx.particle.ParticleSpawner;
import com.teamwizardry.librarianlib.client.fx.particle.functions.InterpFadeInOut;
import com.teamwizardry.librarianlib.common.util.math.interpolate.StaticInterp;
import com.teamwizardry.refraction.api.ConfigValues;
import com.teamwizardry.refraction.api.Constants;
import com.teamwizardry.refraction.api.Utils;
import com.teamwizardry.refraction.api.beam.Effect.EffectType;
import com.teamwizardry.refraction.api.beam.modes.BeamMode;
import com.teamwizardry.refraction.api.beam.modes.BeamModeRegistry;
import com.teamwizardry.refraction.api.beam.modes.ModeEffect;
import com.teamwizardry.refraction.api.raytrace.EntityTrace;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.common.util.INBTSerializable;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.eventhandler.Event;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
public class Beam implements INBTSerializable<NBTTagCompound> {
/**
* The mode of the beam
*/
@NotNull
public BeamMode mode = BeamModeRegistry.NONE;
/**
     * The initial position the beam comes from.
*/
public Vec3d initLoc;
/**
* The vector that specifies the inclination of the beam.
* Set it to your final location and it'll work.
*/
public Vec3d slope;
/**
* The destination of the beam. Don't touch this, just set the slope to the final loc
* and let this class handleBeam it unless you know what you're doing.
*/
public Vec3d finalLoc;
/**
     * The color of the beam, including its alpha.
*/
@NotNull
public Color color = Color.WHITE;
/**
* The world the beam will spawn in.
*/
@NotNull
public World world;
/**
     * The effect the beam will produce across itself or at its destination.
*/
@Nullable
public Effect effect;
/**
* If true, the beam will phase through entities.
*/
public boolean ignoreEntities = false;
/**
* The raytrace produced from the beam after it spawns.
* Contains some neat methods you can use.
*/
public RayTraceResult trace;
/**
     * The range of the raytrace. Will default to ConfigValues.BEAM_RANGE unless otherwise specified.
*/
public double range = ConfigValues.BEAM_RANGE;
// /**
// * A unique identifier for a beam. Used for uniqueness checks.
// */
// @NotNull
// public UUID uuid;
/**
* The number of times this beam has bounced or been reflected.
*/
public int bouncedTimes = 0;
/**
* The amount of times this beam is allowed to bounce or reflect.
*/
public int allowedBounceTimes = ConfigValues.BEAM_BOUNCE_LIMIT;
/**
* Will spawn a particle at either the beginning or at the end of the beam if any are enabled.
*/
public boolean enableParticleBeginning = false, enableParticleEnd;
/**
* The uuid of the entity that will not be affected by the beam.
*/
@Nullable
public UUID uuidToSkip;
/**
* The person theoretically casting the beam.
*/
@Nullable
public Entity caster;
/**
* The custom name of the beam
*/
public String customName = "";
/**
* The physical particle that will spawn at the beginning or end
*/
@SideOnly(Side.CLIENT)
private ParticleBuilder particle1, particle2;
public Beam(@NotNull World world, @NotNull Vec3d initLoc, @NotNull Vec3d slope, @NotNull Color color) {
this.world = world;
this.initLoc = initLoc;
this.slope = slope;
this.finalLoc = slope.normalize().scale(128).add(initLoc);
this.color = color;
Utils.HANDLER.runIfClient(() -> {
particle1 = new ParticleBuilder(3);
particle1.setRender(new ResourceLocation(Constants.MOD_ID, "particles/star"));
particle1.disableRandom();
particle1.disableMotionCalculation();
particle1.setAlphaFunction(new InterpFadeInOut(0f, 1f));
particle1.setScale(ThreadLocalRandom.current().nextFloat() * 2);
particle1.setColor(new Color(color.getRed(), color.getGreen(), color.getBlue(), 10));
particle2 = new ParticleBuilder(ThreadLocalRandom.current().nextInt(20, 100));
particle2.setRender(new ResourceLocation(Constants.MOD_ID, "particles/lens_flare_1"));
particle2.disableRandom();
particle2.disableMotionCalculation();
particle2.setColor(new Color(color.getRed(), color.getGreen(), color.getBlue(), ThreadLocalRandom.current().nextInt(10, 15)));
particle2.setAlphaFunction(new InterpFadeInOut((float) ThreadLocalRandom.current().nextDouble(0, 1), (float) ThreadLocalRandom.current().nextDouble(0, 1)));
particle2.setScale((float) ThreadLocalRandom.current().nextDouble(0.5, 2.5));
});
}
public Beam(World world, double initX, double initY, double initZ, double slopeX, double slopeY, double slopeZ, Color color) {
this(world, new Vec3d(initX, initY, initZ), new Vec3d(slopeX, slopeY, slopeZ), color);
}
public Beam(World world, double initX, double initY, double initZ, double slopeX, double slopeY, double slopeZ, float red, float green, float blue, float alpha) {
this(world, initX, initY, initZ, slopeX, slopeY, slopeZ, new Color(red, green, blue, alpha));
}
public Beam(NBTTagCompound compound) {
deserializeNBT(compound);
}
public boolean doBeamsMatch(Beam beam) {
return beam.color.getRGB() == color.getRGB()
&& beam.slope.xCoord == slope.xCoord
&& beam.slope.yCoord == slope.yCoord
&& beam.slope.zCoord == slope.zCoord
&& beam.initLoc.xCoord == initLoc.xCoord
&& beam.initLoc.yCoord == initLoc.yCoord
&& beam.initLoc.zCoord == initLoc.zCoord
&& beam.enableParticleEnd == enableParticleEnd
&& beam.enableParticleBeginning == enableParticleBeginning
&& beam.ignoreEntities == ignoreEntities
&& beam.allowedBounceTimes == allowedBounceTimes
&& beam.bouncedTimes == bouncedTimes
&& beam.range == range
&& beam.mode.equals(mode);
}
/**
* Will create a beam that's exactly like the one passed.
*
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam() {
return createSimilarBeam(initLoc, finalLoc);
}
/**
* Will create a beam that's exactly like the one passed except in color.
*
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Color color) {
return createSimilarBeam(initLoc, finalLoc, color);
}
/**
* Will create a similar beam that starts from the position this beam ended at
     * and will set its slope to the one specified. So it's a new beam from the position
* you last hit to the new one you specify.
*
* @param slope The slope or destination or final location the beam will point to.
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Vec3d slope) {
return createSimilarBeam(finalLoc, slope);
}
/**
* Will create a similar beam that starts and ends in the positions you specify
*
* @param init The initial location or origin to spawn the beam from.
* @param dir The direction or slope or final destination or location the beam will point to.
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Vec3d init, Vec3d dir) {
return createSimilarBeam(init, dir, color);
}
/**
* Will create a similar beam that starts and ends in the positions you specify, with a custom color.
*
* @param init The initial location or origin to spawn the beam from.
* @param dir The direction or slope or final destination or location the beam will point to.
* @return The new beam created. Can be modified as needed.
*/
public Beam createSimilarBeam(Vec3d init, Vec3d dir, Color color) {
return new Beam(world, init, dir, color)
.setIgnoreEntities(ignoreEntities)
.setAllowedBounceTimes(allowedBounceTimes)
.setBouncedTimes(bouncedTimes)
.incrementBouncedTimes()
.setMode(mode)
.setRange(range)
.setCaster(caster);
}
/**
* Will change the mode of the beam
*
* @param mode Defines the new mode this beam will be.
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setMode(@NotNull BeamMode mode) {
this.mode = mode;
return this;
}
/**
* Will change the name of the beam.
*
* @param name Defines the custom name of the beam.
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setName(@NotNull String name) {
this.customName = name;
return this;
}
/**
* Will set the theoretical caster of the beam.
*
* @param caster Defines the entity casting the beam.
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setCaster(@Nullable Entity caster) {
this.caster = caster;
return this;
}
/**
* The RayTrace will skip the first time it hits an entity with this uuid
*
* @param uuidToSkip The uuid to skip the first time it's detected
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setUUIDToSkip(UUID uuidToSkip) {
this.uuidToSkip = uuidToSkip;
return this;
}
/**
* Will create a tiny particle at the initLoc of the beam
*
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam enableParticleBeginning() {
this.enableParticleBeginning = true;
return this;
}
/**
* Will create a tiny particle at the end of the beam
*
     * @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam enableParticleEnd() {
this.enableParticleEnd = true;
return this;
}
/**
* Will set the amount of times this beam has already bounced or been reflected
*
* @param bouncedTimes The amount of times this beam has bounced or been reflected
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setBouncedTimes(int bouncedTimes) {
this.bouncedTimes = bouncedTimes;
return this;
}
/**
* Will set the amount of times this beam will be allowed to bounce or reflect.
*
* @param allowedBounceTimes The amount of times this beam is allowed to bounce or reflect
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setAllowedBounceTimes(int allowedBounceTimes) {
this.allowedBounceTimes = allowedBounceTimes;
return this;
}
/**
* Will change the slope or destination or final location the beam will point to.
*
* @param slope The final location or destination.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setSlope(@NotNull Vec3d slope) {
this.slope = slope;
this.finalLoc = slope.normalize().scale(128).add(initLoc);
return this;
}
/**
* Will increment the amount of times this beam has bounced or reflected
*
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam incrementBouncedTimes() {
bouncedTimes++;
return this;
}
/**
* Will change the color of the beam with the alpha.
*
* @param color The color of the new beam.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setColor(@NotNull Color color) {
this.color = color;
return this;
}
/**
* If set to true, the beam will phase through entities.
*
     * @param ignoreEntities The boolean that will specify if the beam should phase through entities or not. Default false.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setIgnoreEntities(boolean ignoreEntities) {
this.ignoreEntities = ignoreEntities;
return this;
}
/**
* Will set the beam's new starting position or origin and will continue on towards the slope still specified.
*
     * @param initLoc The new initial location the beam will start from.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setInitLoc(@NotNull Vec3d initLoc) {
this.initLoc = initLoc;
this.finalLoc = slope.normalize().scale(128).add(initLoc);
return this;
}
/**
* Will set the beam's effect if you don't want it to autodetect the effect by itself from the color
* you specified.
*
* @param effect The new effect this beam will produce.
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setEffect(@Nullable Effect effect) {
this.effect = effect;
return this;
}
/**
* Will set the range the raytrace will attempt.
*
     * @param range The new range of the beam. Default: ConfigValues.BEAM_RANGE
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
public Beam setRange(double range) {
this.range = range;
return this;
}
public Beam setUUID(UUID uuid) {
// this.uuid = uuid;
return this;
}
/**
* Will initialize all variables left to prepare before the beam actually spawns.
*
* @return This beam itself for the convenience of editing a beam in one line/chain.
*/
private Beam initializeVariables() {
// EFFECT CHECKING //
if (effect == null && mode instanceof ModeEffect) {
Effect tempEffect = EffectTracker.getEffect(this);
if (tempEffect != null) effect = tempEffect;
} else if (effect != null && !(mode instanceof ModeEffect)) effect = null;
// EFFECT CHECKING //
// BEAM PHASING CHECKS //
EntityTrace entityTrace = new EntityTrace(world, initLoc, slope).setUUIDToSkip(uuidToSkip).setRange(range);
        if (ignoreEntities || (effect != null && effect.getType() == EffectType.BEAM)) // If either of these is true, phase the beam
trace = entityTrace.setIgnoreEntities(true).cast();
else trace = entityTrace.setIgnoreEntities(false).cast();
// BEAM PHASING CHECKS //
if (trace != null && trace.hitVec != null) this.finalLoc = trace.hitVec;
return this;
}
/**
* Will spawn the final complete beam.
*/
public void spawn() {
if (world.isRemote) return;
if (color.getAlpha() <= 1) return;
if (bouncedTimes > allowedBounceTimes) return;
initializeVariables();
if (trace == null) return;
if (trace.hitVec == null) return;
if (finalLoc == null) return;
// EFFECT HANDLING //
boolean pass = true;
boolean traceCompleted = false;
// Making sure we don't recur //
int tries = 0;
// IBeamHandler handling
while (!traceCompleted && tries < 100) {
tries++;
if (trace == null)
return;
else if (trace.typeOfHit == RayTraceResult.Type.BLOCK) {
BlockPos pos = trace.getBlockPos();
IBlockState state = world.getBlockState(pos);
BeamHitEvent event = new BeamHitEvent(world, this, pos, state);
MinecraftForge.EVENT_BUS.post(event);
if (event.getResult() == Event.Result.DEFAULT) {
traceCompleted = true;
if (state.getBlock() instanceof IBeamHandler) {
traceCompleted = (((IBeamHandler) state.getBlock()).handleBeam(world, pos, this));
pass = false;
}
} else {
traceCompleted = event.getResult() == Event.Result.DENY;
pass = event.getResult() == Event.Result.ALLOW;
}
} else
traceCompleted = trace.typeOfHit != RayTraceResult.Type.ENTITY ||
!MinecraftForge.EVENT_BUS.post(new BeamHitEntityEvent(world, this, trace.entityHit));
if (!traceCompleted) traceCompleted = recast();
}
// Effect handling
if (mode instanceof ModeEffect)
if (effect != null) {
if (effect.getType() == EffectType.BEAM)
EffectTracker.addEffect(world, this);
else if (pass) {
if (effect.getType() == EffectType.SINGLE) {
if (trace.typeOfHit != RayTraceResult.Type.MISS)
EffectTracker.addEffect(world, trace.hitVec, effect);
else if (trace.typeOfHit == RayTraceResult.Type.BLOCK) {
BlockPos pos = trace.getBlockPos();
EffectTracker.addEffect(world, new Vec3d(pos.getX() + 0.5, pos.getY() + 0.5, pos.getZ() + 0.5), effect);
}
}
}
}
// EFFECT HANDLING
// ENTITY REFLECTING
if (trace.typeOfHit == RayTraceResult.Type.ENTITY && trace.entityHit instanceof EntityLivingBase) {
EntityLivingBase entity = (EntityLivingBase) trace.entityHit;
boolean flag = true;
for (ItemStack armor : entity.getArmorInventoryList()) {
if (armor == null) {
flag = false;
break;
}
if (armor.getItem() instanceof IReflectiveArmor) {
flag = false;
break;
}
}
if (flag)
createSimilarBeam(entity.getLook(1)).setUUIDToSkip(entity.getUniqueID()).enableParticleBeginning().spawn();
}
// ENTITY REFLECTING
// Particle packet sender
Utils.HANDLER.fireLaserPacket(this);
// PARTICLES
if (enableParticleBeginning) Utils.HANDLER.runIfClient(() -> {
if (ThreadLocalRandom.current().nextInt(10) == 0)
ParticleSpawner.spawn(particle1, world, new StaticInterp<>(initLoc), 1);
if (ThreadLocalRandom.current().nextInt(100) == 0)
ParticleSpawner.spawn(particle2, world, new StaticInterp<>(initLoc), 1);
});
if (trace.hitVec != null && enableParticleEnd) Utils.HANDLER.runIfClient(() -> {
if (ThreadLocalRandom.current().nextInt(10) == 0)
ParticleSpawner.spawn(particle1, world, new StaticInterp<>(trace.hitVec), 1);
if (ThreadLocalRandom.current().nextInt(100) == 0)
ParticleSpawner.spawn(particle2, world, new StaticInterp<>(trace.hitVec), 1);
});
// PARTICLES
}
private boolean recast() {
EntityTrace entityTrace = new EntityTrace(world, this).setUUIDToSkip(uuidToSkip);
if (entityTrace.range <= 0) return true;
if (entityTrace.rayTraceResult != null)
trace = entityTrace.rayTraceResult;
        else if (ignoreEntities || (effect != null && effect.getType() == EffectType.BEAM)) // If either of these is true, phase the beam
trace = entityTrace.setIgnoreEntities(true).cast();
else trace = entityTrace.setIgnoreEntities(false).cast();
if (trace != null && trace.hitVec != null) this.finalLoc = trace.hitVec;
return false;
}
@Override
public boolean equals(Object other) {
return super.equals(other);
}
@Override
public int hashCode() {
return super.hashCode();
}
@Override
public NBTTagCompound serializeNBT() {
NBTTagCompound compound = new NBTTagCompound();
compound.setDouble("init_loc_x", initLoc.xCoord);
compound.setDouble("init_loc_y", initLoc.yCoord);
compound.setDouble("init_loc_z", initLoc.zCoord);
compound.setDouble("slope_x", slope.xCoord);
compound.setDouble("slope_y", slope.yCoord);
compound.setDouble("slope_z", slope.zCoord);
compound.setInteger("color", color.getRGB());
compound.setInteger("world", world.provider.getDimension());
compound.setInteger("bounce_times", bouncedTimes);
compound.setInteger("allowed_bounce_times", allowedBounceTimes);
compound.setDouble("range", range);
compound.setString("name", customName);
compound.setBoolean("ignore_entities", ignoreEntities);
compound.setBoolean("enable_particle_beginning", enableParticleBeginning);
compound.setBoolean("enable_particle_end", enableParticleEnd);
compound.setString("mode", mode.getName());
if (uuidToSkip != null) compound.setUniqueId("uuid_to_skip", uuidToSkip);
return compound;
}
@Override
public void deserializeNBT(NBTTagCompound nbt) {
if (nbt.hasKey("world")) {
            world = FMLCommonHandler.instance().getMinecraftServerInstance().worldServerForDimension(nbt.getInteger("world"));
} else throw new NullPointerException("'world' key not found or missing in deserialized beam object.");
if (nbt.hasKey("init_loc_x") && nbt.hasKey("init_loc_y") && nbt.hasKey("init_loc_z")) {
initLoc = new Vec3d(nbt.getDouble("init_loc_x"), nbt.getDouble("init_loc_y"), nbt.getDouble("init_loc_z"));
} else throw new NullPointerException("'init_loc' key not found or missing in deserialized beam object.");
if (nbt.hasKey("slope_x") && nbt.hasKey("slope_y") && nbt.hasKey("slope_z")) {
slope = new Vec3d(nbt.getDouble("slope_x"), nbt.getDouble("slope_y"), nbt.getDouble("slope_z"));
finalLoc = slope.normalize().scale(128).add(initLoc);
} else throw new NullPointerException("'slope' key not found or missing in deserialized beam object.");
if (nbt.hasKey("color")) {
color = new Color(nbt.getInteger("color"), true);
} else
throw new NullPointerException("'color' or 'color_alpha' keys not found or missing in deserialized beam object.");
if (nbt.hasKey("name")) customName = nbt.getString("name");
if (nbt.hasKey("uuid_to_skip")) uuidToSkip = nbt.getUniqueId("uuid_to_skip");
if (nbt.hasKey("ignore_entities")) ignoreEntities = nbt.getBoolean("ignore_entities");
if (nbt.hasKey("range")) range = nbt.getDouble("range");
if (nbt.hasKey("bounce_times")) bouncedTimes = nbt.getInteger("bounce_times");
if (nbt.hasKey("allowed_bounce_times")) allowedBounceTimes = nbt.getInteger("allowed_bounce_times");
if (nbt.hasKey("enable_particle_beginning"))
enableParticleBeginning = nbt.getBoolean("enable_particle_beginning");
mode = BeamModeRegistry.getMode(nbt.getString("mode"));
if (nbt.hasKey("enable_particle_end")) enableParticleEnd = nbt.getBoolean("enable_particle_end");
}
}
| invert armor reflecting check
| src/main/java/com/teamwizardry/refraction/api/beam/Beam.java | invert armor reflecting check | <ide><path>rc/main/java/com/teamwizardry/refraction/api/beam/Beam.java
<ide> flag = false;
<ide> break;
<ide> }
<del> if (armor.getItem() instanceof IReflectiveArmor) {
<add> if (!(armor.getItem() instanceof IReflectiveArmor)) {
<ide> flag = false;
<ide> break;
<ide> } |
|
Java | apache-2.0 | 7bd4c6b9159d0c203c544c6b34b536437f094f67 | 0 | dkcreinoso/jitsi,bhatvv/jitsi,procandi/jitsi,ibauersachs/jitsi,marclaporte/jitsi,bhatvv/jitsi,tuijldert/jitsi,laborautonomo/jitsi,ringdna/jitsi,ringdna/jitsi,bebo/jitsi,level7systems/jitsi,pplatek/jitsi,cobratbq/jitsi,bebo/jitsi,bhatvv/jitsi,marclaporte/jitsi,ibauersachs/jitsi,ibauersachs/jitsi,ringdna/jitsi,iant-gmbh/jitsi,cobratbq/jitsi,procandi/jitsi,level7systems/jitsi,bebo/jitsi,laborautonomo/jitsi,Metaswitch/jitsi,ibauersachs/jitsi,jitsi/jitsi,level7systems/jitsi,jitsi/jitsi,iant-gmbh/jitsi,jitsi/jitsi,jibaro/jitsi,HelioGuilherme66/jitsi,HelioGuilherme66/jitsi,iant-gmbh/jitsi,tuijldert/jitsi,tuijldert/jitsi,mckayclarey/jitsi,jibaro/jitsi,tuijldert/jitsi,laborautonomo/jitsi,damencho/jitsi,jitsi/jitsi,gpolitis/jitsi,martin7890/jitsi,gpolitis/jitsi,damencho/jitsi,bebo/jitsi,cobratbq/jitsi,jibaro/jitsi,jibaro/jitsi,bhatvv/jitsi,459below/jitsi,damencho/jitsi,HelioGuilherme66/jitsi,pplatek/jitsi,ibauersachs/jitsi,level7systems/jitsi,jibaro/jitsi,tuijldert/jitsi,procandi/jitsi,ringdna/jitsi,dkcreinoso/jitsi,marclaporte/jitsi,gpolitis/jitsi,pplatek/jitsi,dkcreinoso/jitsi,martin7890/jitsi,mckayclarey/jitsi,pplatek/jitsi,Metaswitch/jitsi,cobratbq/jitsi,459below/jitsi,pplatek/jitsi,procandi/jitsi,459below/jitsi,mckayclarey/jitsi,gpolitis/jitsi,jitsi/jitsi,laborautonomo/jitsi,marclaporte/jitsi,iant-gmbh/jitsi,procandi/jitsi,level7systems/jitsi,mckayclarey/jitsi,HelioGuilherme66/jitsi,damencho/jitsi,dkcreinoso/jitsi,bhatvv/jitsi,459below/jitsi,martin7890/jitsi,ringdna/jitsi,cobratbq/jitsi,Metaswitch/jitsi,martin7890/jitsi,HelioGuilherme66/jitsi,bebo/jitsi,gpolitis/jitsi,mckayclarey/jitsi,marclaporte/jitsi,Metaswitch/jitsi,laborautonomo/jitsi,iant-gmbh/jitsi,dkcreinoso/jitsi,damencho/jitsi,martin7890/jitsi,459below/jitsi | /*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.chat.conference;
import java.util.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.impl.gui.i18n.*;
import net.java.sip.communicator.impl.gui.main.chat.*;
import net.java.sip.communicator.impl.gui.utils.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.*;
/**
* The <tt>ConferenceChatPanel</tt> is the chat panel corresponding to a
* multi user chat.
*
* @author Yana Stamcheva
*/
public class ConferenceChatPanel
extends ChatPanel
implements ChatRoomMessageListener,
ChatRoomPropertyChangeListener,
ChatRoomLocalUserStatusListener,
ChatRoomMemberListener
{
private Logger logger = Logger.getLogger(ConferenceChatPanel.class);
private ChatRoom chatRoom;
private ChatWindowManager chatWindowManager;
/**
* Creates an instance of <tt>ConferenceChatPanel</tt>.
*
* @param chatWindow the <tt>ChatWindow</tt> that contains this chat panel
* @param chatRoom the <tt>ChatRoom</tt> object, which provides us the multi
* user chat functionality
*/
public ConferenceChatPanel(ChatWindow chatWindow, ChatRoom chatRoom)
{
super(chatWindow);
this.chatWindowManager = chatWindow.getMainFrame().getChatWindowManager();
this.chatRoom = chatRoom;
List membersList = chatRoom.getMembers();
for (int i = 0; i < membersList.size(); i ++)
{
getChatContactListPanel().addContact((Contact)membersList.get(i));
}
this.chatRoom.addMessageListener(this);
this.chatRoom.addChatRoomPropertyChangeListener(this);
this.chatRoom.addLocalUserStatusListener(this);
this.chatRoom.addMemberListener(this);
}
/**
* Implements the <tt>ChatPanel.getChatName</tt> method.
*
* @return the name of the chat room.
*/
public String getChatName()
{
return chatRoom.getName();
}
/**
* Implements the <tt>ChatPanel.getChatIdentifier</tt> method.
*
* @return the <tt>ChatRoom</tt>
*/
public Object getChatIdentifier()
{
return chatRoom;
}
/**
* Implements the <tt>ChatPanel.getChatStatus</tt> method.
*
* @return the status of this chat room
*/
public PresenceStatus getChatStatus()
{
return null;
}
/**
* Implements the <tt>ChatPanel.loadHistory</tt> method.
* <br>
* Loads the history for this chat room.
*/
public void loadHistory()
{
}
/**
* Implements the <tt>ChatPanel.loadHistory(escapedMessageID)</tt> method.
* <br>
* Loads the history for this chat room and escapes the last message
* received.
*/
public void loadHistory(String escapedMessageID)
{
// TODO Auto-generated method stub
}
/**
* Implements the <tt>ChatPanel.loadPreviousFromHistory</tt> method.
* <br>
* Loads the previous "page" in the history.
*/
public void loadPreviousFromHistory()
{
// TODO Auto-generated method stub
}
/**
* Implements the <tt>ChatPanel.loadNextFromHistory</tt> method.
* <br>
* Loads the next "page" in the history.
*/
public void loadNextFromHistory()
{
// TODO Auto-generated method stub
}
/**
* Implements the <tt>ChatPanel.sendMessage</tt> method.
* <br>
* Sends a message to the chat room.
*/
protected void sendMessage()
{
String body = this.getTextFromWriteArea();
Message msg = chatRoom.createMessage(body);
try
{
chatRoom.sendMessage(msg);
}
catch (Exception ex)
{
logger.error("Failed to send message.", ex);
this.refreshWriteArea();
this.processMessage(
chatRoom.getName(),
new Date(System.currentTimeMillis()),
Constants.OUTGOING_MESSAGE,
msg.getContent());
this.processMessage(
chatRoom.getName(),
new Date(System.currentTimeMillis()),
Constants.ERROR_MESSAGE,
Messages.getI18NString("msgDeliveryInternalError")
.getText());
}
}
/**
* Implements the <tt>ChatPanel.treatReceivedMessage</tt> method.
* <br>
* Treats a received message from the given contact.
*/
public void treatReceivedMessage(Contact sourceContact)
{
}
/**
* Implements the <tt>ChatPanel.sendTypingNotification</tt> method.
* <br>
* Sends a typing notification.
*/
public int sendTypingNotification(int typingState)
{
return 0;
}
/**
* Implements the <tt>ChatPanel.getFirstHistoryMsgTimestamp</tt> method.
*/
public Date getFirstHistoryMsgTimestamp()
{
return null;
}
/**
* Implements the <tt>ChatPanel.getLastHistoryMsgTimestamp</tt> method.
*/
public Date getLastHistoryMsgTimestamp()
{
return null;
}
/**
* Implements the <tt>ChatRoomMessageListener.messageReceived</tt> method.
* <br>
     * Obtains the corresponding <tt>ChatPanel</tt> and processes the message
* there.
*/
public void messageReceived(ChatRoomMessageReceivedEvent evt)
{
ChatRoom sourceChatRoom = (ChatRoom) evt.getSource();
if(!sourceChatRoom.equals(chatRoom))
return;
logger.trace("MESSAGE RECEIVED from contact: "
+ evt.getSourceContact().getAddress());
Contact sourceContact = evt.getSourceContact();
Date date = evt.getTimestamp();
Message message = evt.getSourceMessage();
ChatRoom chatRoom = (ChatRoom) evt.getSource();
ChatPanel chatPanel = chatWindowManager.getChatRoom(chatRoom);
chatPanel.processMessage(
sourceContact.getDisplayName(), date,
Constants.INCOMING_MESSAGE, message.getContent());
chatWindowManager.openChat(chatPanel, false);
GuiActivator.getAudioNotifier()
.createAudio(Sounds.INCOMING_MESSAGE).play();
chatPanel.treatReceivedMessage(sourceContact);
}
/**
* Implements the <tt>ChatRoomMessageListener.messageDelivered</tt> method.
* <br>
* Shows the message in the conversation area and clears the write message
* area.
*/
public void messageDelivered(ChatRoomMessageDeliveredEvent evt)
{
ChatRoom sourceChatRoom = (ChatRoom) evt.getSource();
if(!sourceChatRoom.equals(chatRoom))
return;
logger.trace("MESSAGE DELIVERED to contact: "
+ evt.getDestinationContact().getAddress());
Message msg = evt.getSourceMessage();
ChatPanel chatPanel = null;
if(chatWindowManager.isChatOpenedForChatRoom(sourceChatRoom))
chatPanel = chatWindowManager.getChatRoom(sourceChatRoom);
if (chatPanel != null)
{
ProtocolProviderService protocolProvider = evt
.getDestinationContact().getProtocolProvider();
logger.trace("MESSAGE DELIVERED: process message to chat for contact: "
+ evt.getDestinationContact().getAddress());
chatPanel.processMessage(getChatWindow().getMainFrame()
.getAccount(protocolProvider), evt.getTimestamp(),
Constants.OUTGOING_MESSAGE, msg.getContent());
chatPanel.refreshWriteArea();
}
}
/**
* Implements the <tt>ChatRoomMessageListener.messageDeliveryFailed</tt>
* method.
* <br>
* In the conversation area show an error message, explaining the problem.
*/
public void messageDeliveryFailed(ChatRoomMessageDeliveryFailedEvent evt)
{
ChatRoom sourceChatRoom = (ChatRoom) evt.getSource();
if(!sourceChatRoom.equals(chatRoom))
return;
String errorMsg = null;
Message sourceMessage = (Message) evt.getSource();
Contact sourceContact = evt.getDestinationContact();
if (evt.getErrorCode()
== MessageDeliveryFailedEvent.OFFLINE_MESSAGES_NOT_SUPPORTED) {
errorMsg = Messages.getI18NString(
"msgDeliveryOfflineNotSupported").getText();
}
else if (evt.getErrorCode()
== MessageDeliveryFailedEvent.NETWORK_FAILURE) {
errorMsg = Messages.getI18NString("msgNotDelivered").getText();
}
else if (evt.getErrorCode()
== MessageDeliveryFailedEvent.PROVIDER_NOT_REGISTERED) {
errorMsg = Messages.getI18NString(
"msgSendConnectionProblem").getText();
}
else if (evt.getErrorCode()
== MessageDeliveryFailedEvent.INTERNAL_ERROR) {
errorMsg = Messages.getI18NString(
"msgDeliveryInternalError").getText();
}
else {
errorMsg = Messages.getI18NString(
"msgDeliveryFailedUnknownError").getText();
}
ChatPanel chatPanel = chatWindowManager
.getChatRoom(chatRoom);
chatPanel.refreshWriteArea();
chatPanel.processMessage(
sourceContact.getDisplayName(),
new Date(System.currentTimeMillis()),
Constants.OUTGOING_MESSAGE,
sourceMessage.getContent());
chatPanel.processMessage(
sourceContact.getDisplayName(),
new Date(System.currentTimeMillis()),
Constants.ERROR_MESSAGE,
errorMsg);
chatWindowManager.openChat(chatPanel, false);
}
public void chatRoomChanged(ChatRoomPropertyChangeEvent event)
{
}
public void localUserStatusChanged(ChatRoomLocalUserStatusChangeEvent evt)
{
}
public void memberStatusChanged(ChatRoomMemberEvent evt)
{
}
}
| src/net/java/sip/communicator/impl/gui/main/chat/conference/ConferenceChatPanel.java | /*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.chat.conference;
import java.util.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.impl.gui.i18n.*;
import net.java.sip.communicator.impl.gui.main.chat.*;
import net.java.sip.communicator.impl.gui.utils.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.*;
/**
* The <tt>ConferenceChatPanel</tt> is the chat panel corresponding to a
* multi user chat.
*
* @author Yana Stamcheva
*/
public class ConferenceChatPanel
extends ChatPanel
implements ChatRoomMessageListener,
ChatRoomPropertyChangeListener,
ChatRoomLocalUserStatusListener,
ChatRoomParticipantStatusListener
{
private Logger logger = Logger.getLogger(ConferenceChatPanel.class);
private ChatRoom chatRoom;
private ChatWindowManager chatWindowManager;
/**
* Creates an instance of <tt>ConferenceChatPanel</tt>.
*
* @param chatWindow the <tt>ChatWindow</tt> that contains this chat panel
* @param chatRoom the <tt>ChatRoom</tt> object, which provides us the multi
* user chat functionality
*/
public ConferenceChatPanel(ChatWindow chatWindow, ChatRoom chatRoom)
{
super(chatWindow);
this.chatWindowManager = chatWindow.getMainFrame().getChatWindowManager();
this.chatRoom = chatRoom;
List membersList = chatRoom.getMembers();
for (int i = 0; i < membersList.size(); i ++)
{
getChatContactListPanel().addContact((Contact)membersList.get(i));
}
this.chatRoom.addMessageListener(this);
this.chatRoom.addChatRoomPropertyChangeListener(this);
this.chatRoom.addLocalUserStatusListener(this);
/** @todo uncomment when the listener is fully implemented */
// this.chatRoom.addParticipantStatusListener(this);
}
/**
* Implements the <tt>ChatPanel.getChatName</tt> method.
*
* @return the name of the chat room.
*/
public String getChatName()
{
return chatRoom.getName();
}
/**
* Implements the <tt>ChatPanel.getChatIdentifier</tt> method.
*
* @return the <tt>ChatRoom</tt>
*/
public Object getChatIdentifier()
{
return chatRoom;
}
/**
* Implements the <tt>ChatPanel.getChatStatus</tt> method.
*
* @return the status of this chat room
*/
public PresenceStatus getChatStatus()
{
return null;
}
/**
* Implements the <tt>ChatPanel.loadHistory</tt> method.
* <br>
* Loads the history for this chat room.
*/
public void loadHistory()
{
}
/**
* Implements the <tt>ChatPanel.loadHistory(escapedMessageID)</tt> method.
* <br>
* Loads the history for this chat room and escapes the last message
* received.
*/
public void loadHistory(String escapedMessageID)
{
// TODO Auto-generated method stub
}
/**
* Implements the <tt>ChatPanel.loadPreviousFromHistory</tt> method.
* <br>
* Loads the previous "page" in the history.
*/
public void loadPreviousFromHistory()
{
// TODO Auto-generated method stub
}
/**
* Implements the <tt>ChatPanel.loadNextFromHistory</tt> method.
* <br>
* Loads the next "page" in the history.
*/
public void loadNextFromHistory()
{
// TODO Auto-generated method stub
}
/**
* Implements the <tt>ChatPanel.sendMessage</tt> method.
* <br>
* Sends a message to the chat room.
*/
protected void sendMessage()
{
}
/**
* Implements the <tt>ChatPanel.treatReceivedMessage</tt> method.
* <br>
* Treats a received message from the given contact.
*/
public void treatReceivedMessage(Contact sourceContact)
{
}
/**
* Implements the <tt>ChatPanel.sendTypingNotification</tt> method.
* <br>
* Sends a typing notification.
*/
public int sendTypingNotification(int typingState)
{
return 0;
}
/**
* Implements the <tt>ChatPanel.getFirstHistoryMsgTimestamp</tt> method.
*/
public Date getFirstHistoryMsgTimestamp()
{
return null;
}
/**
* Implements the <tt>ChatPanel.getLastHistoryMsgTimestamp</tt> method.
*/
public Date getLastHistoryMsgTimestamp()
{
return null;
}
/**
* Implements the <tt>ChatRoomMessageListener.messageReceived</tt> method.
* <br>
     * Obtains the corresponding <tt>ChatPanel</tt> and processes the message
* there.
*/
public void messageReceived(ChatRoomMessageReceivedEvent evt)
{
ChatRoom sourceChatRoom = (ChatRoom) evt.getSource();
if(!sourceChatRoom.equals(chatRoom))
return;
logger.trace("MESSAGE RECEIVED from contact: "
+ evt.getSourceContact().getAddress());
Contact sourceContact = evt.getSourceContact();
Date date = evt.getTimestamp();
Message message = evt.getSourceMessage();
ChatRoom chatRoom = (ChatRoom) evt.getSource();
ChatPanel chatPanel = chatWindowManager.getChatRoom(chatRoom);
chatPanel.processMessage(
sourceContact.getDisplayName(), date,
Constants.INCOMING_MESSAGE, message.getContent());
chatWindowManager.openChat(chatPanel, false);
GuiActivator.getAudioNotifier()
.createAudio(Sounds.INCOMING_MESSAGE).play();
chatPanel.treatReceivedMessage(sourceContact);
}
/**
* Implements the <tt>ChatRoomMessageListener.messageDelivered</tt> method.
* <br>
* Shows the message in the conversation area and clears the write message
* area.
*/
public void messageDelivered(ChatRoomMessageDeliveredEvent evt)
{
ChatRoom sourceChatRoom = (ChatRoom) evt.getSource();
if(!sourceChatRoom.equals(chatRoom))
return;
logger.trace("MESSAGE DELIVERED to contact: "
+ evt.getDestinationContact().getAddress());
Message msg = evt.getSourceMessage();
ChatPanel chatPanel = null;
if(chatWindowManager.isChatOpenedForChatRoom(sourceChatRoom))
chatPanel = chatWindowManager.getChatRoom(sourceChatRoom);
if (chatPanel != null)
{
ProtocolProviderService protocolProvider = evt
.getDestinationContact().getProtocolProvider();
logger.trace("MESSAGE DELIVERED: process message to chat for contact: "
+ evt.getDestinationContact().getAddress());
chatPanel.processMessage(getChatWindow().getMainFrame()
.getAccount(protocolProvider), evt.getTimestamp(),
Constants.OUTGOING_MESSAGE, msg.getContent());
chatPanel.refreshWriteArea();
}
}
/**
* Implements the <tt>ChatRoomMessageListener.messageDeliveryFailed</tt>
* method.
* <br>
* In the conversation area show an error message, explaining the problem.
*/
public void messageDeliveryFailed(ChatRoomMessageDeliveryFailedEvent evt)
{
ChatRoom sourceChatRoom = (ChatRoom) evt.getSource();
if(!sourceChatRoom.equals(chatRoom))
return;
String errorMsg = null;
Message sourceMessage = (Message) evt.getSource();
Contact sourceContact = evt.getDestinationContact();
if (evt.getErrorCode()
== MessageDeliveryFailedEvent.OFFLINE_MESSAGES_NOT_SUPPORTED) {
errorMsg = Messages.getI18NString(
"msgDeliveryOfflineNotSupported").getText();
}
else if (evt.getErrorCode()
== MessageDeliveryFailedEvent.NETWORK_FAILURE) {
errorMsg = Messages.getI18NString("msgNotDelivered").getText();
}
else if (evt.getErrorCode()
== MessageDeliveryFailedEvent.PROVIDER_NOT_REGISTERED) {
errorMsg = Messages.getI18NString(
"msgSendConnectionProblem").getText();
}
else if (evt.getErrorCode()
== MessageDeliveryFailedEvent.INTERNAL_ERROR) {
errorMsg = Messages.getI18NString(
"msgDeliveryInternalError").getText();
}
else {
errorMsg = Messages.getI18NString(
"msgDeliveryFailedUnknownError").getText();
}
ChatPanel chatPanel = chatWindowManager
.getChatRoom(chatRoom);
chatPanel.refreshWriteArea();
chatPanel.processMessage(
sourceContact.getDisplayName(),
new Date(System.currentTimeMillis()),
Constants.OUTGOING_MESSAGE,
sourceMessage.getContent());
chatPanel.processMessage(
sourceContact.getDisplayName(),
new Date(System.currentTimeMillis()),
Constants.ERROR_MESSAGE,
errorMsg);
chatWindowManager.openChat(chatPanel, false);
}
public void chatRoomChanged(ChatRoomPropertyChangeEvent evt)
{
}
public void localUserStatusChanged(ChatRoomLocalUserStatusChangeEvent evt)
{
}
public void localUserStatusChanged(ChatRoomParticipantStatusChangeEvent evt)
{
}
}
| implement send message for a multi user chat
| src/net/java/sip/communicator/impl/gui/main/chat/conference/ConferenceChatPanel.java | implement send message for a multi user chat | <ide><path>rc/net/java/sip/communicator/impl/gui/main/chat/conference/ConferenceChatPanel.java
<ide> import net.java.sip.communicator.impl.gui.i18n.*;
<ide> import net.java.sip.communicator.impl.gui.main.chat.*;
<ide> import net.java.sip.communicator.impl.gui.utils.*;
<del>import net.java.sip.communicator.service.gui.*;
<ide> import net.java.sip.communicator.service.protocol.*;
<ide> import net.java.sip.communicator.service.protocol.event.*;
<ide> import net.java.sip.communicator.util.*;
<ide> implements ChatRoomMessageListener,
<ide> ChatRoomPropertyChangeListener,
<ide> ChatRoomLocalUserStatusListener,
<del> ChatRoomParticipantStatusListener
<add> ChatRoomMemberListener
<ide> {
<ide> private Logger logger = Logger.getLogger(ConferenceChatPanel.class);
<ide>
<ide> this.chatRoom.addMessageListener(this);
<ide> this.chatRoom.addChatRoomPropertyChangeListener(this);
<ide> this.chatRoom.addLocalUserStatusListener(this);
<del>/** @todo uncomment when the listener is fully implemented */
<del>// this.chatRoom.addParticipantStatusListener(this);
<add> this.chatRoom.addMemberListener(this);
<ide> }
<ide>
<ide> /**
<ide> * Sends a message to the chat room.
<ide> */
<ide> protected void sendMessage()
<del> {
<add>
<add> {
<add> String body = this.getTextFromWriteArea();
<add> Message msg = chatRoom.createMessage(body);
<add>
<add> try
<add> {
<add> chatRoom.sendMessage(msg);
<add> }
<add> catch (Exception ex)
<add> {
<add> logger.error("Failed to send message.", ex);
<add>
<add> this.refreshWriteArea();
<add>
<add> this.processMessage(
<add> chatRoom.getName(),
<add> new Date(System.currentTimeMillis()),
<add> Constants.OUTGOING_MESSAGE,
<add> msg.getContent());
<add>
<add> this.processMessage(
<add> chatRoom.getName(),
<add> new Date(System.currentTimeMillis()),
<add> Constants.ERROR_MESSAGE,
<add> Messages.getI18NString("msgDeliveryInternalError")
<add> .getText());
<add> }
<ide> }
<ide>
<ide> /**
<ide> chatWindowManager.openChat(chatPanel, false);
<ide> }
<ide>
<del> public void chatRoomChanged(ChatRoomPropertyChangeEvent evt)
<del> {
<add> public void chatRoomChanged(ChatRoomPropertyChangeEvent event)
<add> {
<ide> }
<ide>
<ide> public void localUserStatusChanged(ChatRoomLocalUserStatusChangeEvent evt)
<del> {
<del>
<del> }
<del>
<del> public void localUserStatusChanged(ChatRoomParticipantStatusChangeEvent evt)
<del> {
<del>
<add> {
<add> }
<add>
<add> public void memberStatusChanged(ChatRoomMemberEvent evt)
<add> {
<ide> }
<ide> } |
|
JavaScript | mit | a8569333713c2bdcd832a38ea790bfa75ba9db38 | 0 | aminmarashi/binary-bot,binary-com/binary-bot,binary-com/binary-bot,aminmarashi/binary-bot | 'use strict';
import logger from './logger';
import TradeInfo from './tradeInfo';
import account from 'binary-common-utils/account';
import Observer from 'binary-common-utils/observer';
import _Blockly from './blockly';
import storageManager from 'binary-common-utils/storageManager';
import Translator from 'translator';
import Bot from '../bot';
import Introduction from './tours/introduction';
import Welcome from './tours/welcome';
import {PlainChart as Chart} from 'binary-charts';
import lzString from 'lz-string';
import _ from 'underscore';
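// Singleton view controller: wires together the Blockly workspace, the bot, the chart and the account UI.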
var View = function View(){
if ( View.instance ) {
return View.instance;
}
this.observer = new Observer();
View.instance = this;
this.chartType = 'area';
this.tours = {};
this.translator = new Translator();
this.tradeInfo = new TradeInfo();
this.addTranslationToUi();
this.errorAndLogHandling();
var that = this;
this.bot = new Bot();
this.initPromise = new Promise(function(resolve, reject){
that.updateTokenList();
that.blockly = new _Blockly();
that.blockly.initPromise.then(function(){
that.setElementActions();
that.initTours();
resolve();
});
});
};
View.prototype = Object.create(null, {
updateTokenList: {
value: function updateTokenList() {
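// Show the login button when no tokens are stored; otherwise list each saved account in the selector.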
var tokenList = storageManager.getTokenList();
if (tokenList.length === 0) {
$('#login').css('display', 'inline-block');
$('#accountSelect').css('display', 'none');
$('#logout').css('display', 'none');
} else {
$('#login').css('display', 'none');
$('#accountSelect').css('display', 'inline-block');
$('#logout').css('display', 'inline-block');
tokenList.forEach(function (tokenInfo) {
var str;
if ( tokenInfo.hasOwnProperty('isVirtual') ) {
str = (tokenInfo.isVirtual) ? 'Virtual Account' : 'Real Account';
} else {
str = '';
}
$('#accountSelect').append('<option value="' + tokenInfo.token + '">'+str + ' (' + tokenInfo.account_name+ ') ' + '</option>');
});
}
}
},
addTranslationToUi: {
value: function addTranslationToUi(){
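// Translate the text of every element that carries a data-i18n-text attribute.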
var that = this;
$('[data-i18n-text]')
.each(function() {
var contents = $(this).contents();
if (contents.length > 0) {
if (contents.get(0).nodeType == Node.TEXT_NODE) {
$(this).text(that.translator.translateText($(this)
.attr('data-i18n-text')))
.append(contents.slice(1));
}
} else {
$(this)
.text(that.translator.translateText($(this)
.attr('data-i18n-text')));
}
});
}
},
initTours: {
value: function initTours() {
this.tours.introduction = new Introduction();
this.tours.welcome = new Welcome();
}
},
startTour: {
value: function startTour() {
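// Start the tour chosen in the #tours dropdown, stopping any tour that is already running.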
var that = this;
$('#tours').on('change', function(e) {
var value = $(this).val();
if (value === '') return;
if (that.activeTour) {
that.activeTour.stop();
}
that.activeTour = that.tours[value];
that.activeTour.start(function(){
that.activeTour = null;
});
});
}
},
errorAndLogHandling: {
value: function errorAndLogHandling(){
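// Surface errors and log messages as notifications, and mirror them to the console or the log queue.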
var that = this;
this.observer.register('ui.error', function showError(error) {
var api = true;
if (error.stack) {
api = false;
if (logger.isDebug()) {
console.log('%c' + error.stack, 'color: red');
} else {
logger.addLogToQueue('%c' + error.stack, 'color: red');
}
}
console.error({
api: api,
0: error.message,
1: lzString.compressToBase64(JSON.stringify(error.stack)),
2: lzString.compressToBase64(that.blockly.generatedJs),
3: lzString.compressToBase64(that.blockly.blocksXmlStr)
});
var message = error.message;
$.notify(message, {
position: 'bottom right',
className: 'error',
});
if (logger.isDebug()) {
console.log('%cError: ' + message, 'color: red');
} else {
logger.addLogToQueue('%cError: ' + message, 'color: red');
}
});
var observeForLog = function observeForLog(type, position) {
var subtype = ( position === 'left' )? '.left' : '';
that.observer.register('ui.log.' + type + subtype , function(message){
if ( type === 'warn' ) {
console.warn(message);
}
$.notify(message, {
position: 'bottom ' + position,
className: type,
});
if (logger.isDebug()) {
console.log(message);
} else {
logger.addLogToQueue(message);
}
});
};
["success", "info", "warn", "error"].forEach(function(type){
observeForLog(type, 'right');
observeForLog(type, 'left');
});
}
},
setFileBrowser: {
value: function setFileBrowser(){
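// Handle drag-and-drop and the file dialog so saved block XML files can be loaded into the workspace.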
var that = this;
var handleFileSelect = function handleFileSelect(e) {
var files;
if (e.type === 'drop') {
e.stopPropagation();
e.preventDefault();
files = e.dataTransfer.files;
} else {
files = e.target.files;
}
files = Array.prototype.slice.apply(files);
var file = files[0];
if (file) {
if (file.type.match('text/xml')) {
readFile(file);
} else {
that.observer.emit('ui.log.info', that.translator.translateText('File is not supported:' + ' ') + file.name);
}
}
};
var readFile = function readFile(f) {
var reader = new FileReader();
reader.onload = (function (theFile) {
$('#fileBrowser').hide();
return function (e) {
try {
that.blockly.loadBlocks(e.target.result);
that.observer.emit('ui.log.success', that.translator.translateText('Blocks are loaded successfully'));
} catch (err) {
that.observer.emit('ui.error', err);
}
};
})(f);
reader.readAsText(f);
};
var handleDragOver = function handleDragOver(e) {
e.stopPropagation();
e.preventDefault();
e.dataTransfer.dropEffect = 'copy';
};
var dropZone = document.getElementById('dropZone');
dropZone.addEventListener('dragover', handleDragOver, false);
dropZone.addEventListener('drop', handleFileSelect, false);
if (document.getElementById('files')) {
document.getElementById('files')
.addEventListener('change', handleFileSelect, false);
}
$('#open_btn')
.on('click', function() {
$.FileDialog({
accept: ".xml",
cancelButton: "Close",
dragMessage: "Drop files here",
dropheight: 400,
errorMessage: "An error occurred while loading file",
multiple: false,
okButton: "OK",
readAs: "DataURL",
removeMessage: "Remove file",
title: "Load file"
});
})
.on('files.bs.filedialog', function(ev) {
var files_list = ev.files;
handleFileSelect(files_list);
})
.on('cancel.bs.filedialog', function(ev) {
handleFileSelect(ev);
});
}
},
setElementActions: {
value: function setElementActions(){
this.setFileBrowser();
this.startTour();
this.addBindings();
this.addEventHandlers();
this.tradeInfo.show();
}
},
addBindings: {
value: function addBindings(){
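// Bind toolbar buttons (run, stop, save/load, undo/redo, login/logout) and the keyboard shortcuts.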
var that = this;
var stop = function stop(e) {
if (e) {
e.preventDefault();
}
that.bot.stop();
};
var logout = function logout() {
account.logoutAllTokens(function(){
that.updateTokenList();
that.observer.emit('ui.log.info', that.translator.translateText('Logged you out!'));
});
};
$('#stopButton')
.click(stop)
.hide();
$('.panelExitButton')
.click(function () {
$(this)
.parent()
.hide();
});
$('.panel')
.hide();
$('.panel')
.drags();
$('#chart')
.mousedown(function (e) { // keep mousedown on the chart from triggering the panel's draggable behavior
e.stopPropagation();
});
$('table')
.mousedown(function (e) { // keep mousedown on tables from triggering the panel's draggable behavior
e.stopPropagation();
});
$('#saveXml')
.click(function (e) {
that.blockly.saveXml();
});
$('#undo')
.click(function (e) {
that.blockly.undo();
});
$('#redo')
.click(function (e) {
that.blockly.redo();
});
$('#showSummary')
.click(function (e) {
$('#summaryPanel')
.show();
});
$('#loadXml')
.click(function (e) {
$('#fileBrowser')
.show();
});
$('#logout')
.click(function (e) {
logout();
$('.logout').hide();
});
$('#runButton')
.click(function (e) {
$('#stopButton').show();
$('#runButton').hide();
that.blockly.run();
});
$('#resetButton')
.click(function (e) {
that.blockly.loadBlocks();
});
$('#login')
.bind('click.login', function(e){
document.location = 'https://oauth.binary.com/oauth2/authorize?app_id=' + storageManager.get('appId') + '&l=' + that.translator.getLanguage().toUpperCase();
})
.text('Log in');
$(document).keydown(function(e) {
switch(e.which) {
case 189: // -
if ( e.ctrlKey ) {
that.blockly.zoomOnPlusMinus(false);
}
break;
case 187: // +
if ( e.ctrlKey ) {
that.blockly.zoomOnPlusMinus(true);
}
break;
case 39: // right
if (that.activeTour) {
that.activeTour.next();
} else {
return;
}
break;
default: return; // exit this handler for other keys
}
e.preventDefault(); // prevent the default action (scroll / move caret)
});
}
},
updateChart: {
value: function updateChart(info) {
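// Redraw the chart with the latest ticks or candles and overlay the currently open contract, if any.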
var that = this;
var chartOptions = {
type: this.chartType,
theme: 'light',
};
if ( this.chartType === 'candlestick' ) {
chartOptions.ticks = info.candles;
} else {
chartOptions.ticks = info.ticks;
}
if (this.latestOpenContract) {
chartOptions.contract = this.latestOpenContract;
if (this.latestOpenContract.is_sold) {
delete this.latestOpenContract;
}
}
chartOptions.pipSize = Number(Number(info.pip)
.toExponential()
.substring(3));
if (!this.chart) {
this.chart = Chart('chart', chartOptions);
} else {
this.chart.updateChart(chartOptions);
}
}
},
addEventHandlers: {
value: function addEventHandlers() {
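// Keep the UI in sync with bot and API events: errors, trade info, open contracts and tick updates.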
var that = this;
this.observer.register('api.error', function(error){
if (error.code === 'InvalidToken'){
storageManager.removeAllTokens();
that.updateTokenList();
}
that.bot.stop();
that.observer.emit('ui.error', error);
});
this.observer.register('bot.stop', function(tradeInfo){
$('#runButton').show();
$('#stopButton').hide();
});
this.observer.register('bot.tradeInfo', function(tradeInfo){
_.extend(that.tradeInfo.tradeInfo, tradeInfo);
that.tradeInfo.update();
});
this.observer.register('bot.tradeUpdate', function(contract){
that.latestOpenContract = contract;
});
this.observer.register('bot.finish', function(contract){
that.tradeInfo.add(contract);
});
this.observer.register('bot.tickUpdate', function(info){
that.updateChart(info);
});
}
}
});
module.exports = View;
| src/botPage/view/index.js | 'use strict';
import logger from './logger';
import TradeInfo from './tradeInfo';
import account from 'binary-common-utils/account';
import Observer from 'binary-common-utils/observer';
import _Blockly from './blockly';
import storageManager from 'binary-common-utils/storageManager';
import Translator from 'translator';
import Bot from '../bot';
import Introduction from './tours/introduction';
import Welcome from './tours/welcome';
import {PlainChart as Chart} from 'binary-charts';
import lzString from 'lz-string';
import _ from 'underscore';
var View = function View(){
if ( View.instance ) {
return View.instance;
}
this.observer = new Observer();
View.instance = this;
this.chartType = 'area';
this.tours = {};
this.translator = new Translator();
this.tradeInfo = new TradeInfo();
this.addTranslationToUi();
this.errorAndLogHandling();
var that = this;
this.bot = new Bot();
this.initPromise = new Promise(function(resolve, reject){
that.updateTokenList();
that.blockly = new _Blockly();
that.blockly.initPromise.then(function(){
that.setElementActions();
that.initTours();
resolve();
});
});
};
View.prototype = Object.create(null, {
updateTokenList: {
value: function updateTokenList() {
var tokenList = storageManager.getTokenList();
if (tokenList.length === 0) {
$('#login').css('display', 'inline-block');
$('#accountSelect').css('display', 'none');
$('#logout').css('display', 'none');
} else {
$('#login').css('display', 'none');
$('#accountSelect').css('display', 'inline-block');
$('#logout').css('display', 'inline-block');
tokenList.forEach(function (tokenInfo) {
var str;
if ( tokenInfo.hasOwnProperty('isVirtual') ) {
str = (tokenInfo.isVirtual) ? 'Virtual Account' : 'Real Account';
} else {
str = '';
}
$('#accountSelect').append('<option value="' + tokenInfo.token + '">'+str + ' (' + tokenInfo.account_name+ ') ' + '</option>');
});
}
}
},
addTranslationToUi: {
value: function addTranslationToUi(){
var that = this;
$('[data-i18n-text]')
.each(function() {
var contents = $(this).contents();
if (contents.length > 0) {
if (contents.get(0).nodeType == Node.TEXT_NODE) {
$(this).text(that.translator.translateText($(this)
.attr('data-i18n-text')))
.append(contents.slice(1));
}
} else {
$(this)
.text(that.translator.translateText($(this)
.attr('data-i18n-text')));
}
});
}
},
initTours: {
value: function initTours() {
this.tours.introduction = new Introduction();
this.tours.welcome = new Welcome();
}
},
startTour: {
value: function startTour() {
var that = this;
$('#tours').on('change', function(e) {
var value = $(this).val();
if (value === '') return;
if (that.activeTour) {
that.activeTour.stop();
}
that.activeTour = that.tours[value];
that.activeTour.start(function(){
that.activeTour = null;
});
});
}
},
errorAndLogHandling: {
value: function errorAndLogHandling(){
var that = this;
this.observer.register('ui.error', function showError(error) {
var api = true;
if (error.stack) {
api = false;
if (logger.isDebug()) {
console.log('%c' + error.stack, 'color: red');
} else {
logger.addLogToQueue('%c' + error.stack, 'color: red');
}
}
console.error({
api: api,
0: error.message,
1: lzString.compressToBase64(JSON.stringify(error.stack)),
2: lzString.compressToBase64(that.blockly.generatedJs),
3: lzString.compressToBase64(that.blockly.blocksXmlStr)
});
var message = error.message;
$.notify(message, {
position: 'bottom right',
className: 'error',
});
if (logger.isDebug()) {
console.log('%cError: ' + message, 'color: red');
} else {
logger.addLogToQueue('%cError: ' + message, 'color: red');
}
});
var observeForLog = function observeForLog(type, position) {
var subtype = ( position === 'left' )? '.left' : '';
that.observer.register('ui.log.' + type + subtype , function(message){
if ( type === 'warn' ) {
console.warn(message);
}
$.notify(message, {
position: 'bottom ' + position,
className: type,
});
if (logger.isDebug()) {
console.log(message);
} else {
logger.addLogToQueue(message);
}
});
};
["success", "info", "warn", "error"].forEach(function(type){
observeForLog(type, 'right');
observeForLog(type, 'left');
});
}
},
setFileBrowser: {
value: function setFileBrowser(){
var that = this;
var handleFileSelect = function handleFileSelect(e) {
var files;
if (e.type === 'drop') {
e.stopPropagation();
e.preventDefault();
files = e.dataTransfer.files;
} else {
files = e.target.files;
}
files = Array.prototype.slice.apply(files);
var file = files[0];
if (file) {
if (file.type.match('text/xml')) {
readFile(file);
} else {
that.observer.emit('ui.log.info', that.translator.translateText('File is not supported:' + ' ') + file.name);
}
}
};
var readFile = function readFile(f) {
var reader = new FileReader();
reader.onload = (function (theFile) {
$('#fileBrowser').hide();
return function (e) {
try {
that.blockly.loadBlocks(e.target.result);
that.observer.emit('ui.log.success', that.translator.translateText('Blocks are loaded successfully'));
} catch (err) {
that.observer.emit('ui.error', err);
}
};
})(f);
reader.readAsText(f);
};
var handleDragOver = function handleDragOver(e) {
e.stopPropagation();
e.preventDefault();
e.dataTransfer.dropEffect = 'copy';
};
var dropZone = document.getElementById('dropZone');
dropZone.addEventListener('dragover', handleDragOver, false);
dropZone.addEventListener('drop', handleFileSelect, false);
if (document.getElementById('files')) {
document.getElementById('files')
.addEventListener('change', handleFileSelect, false);
}
$('#open_btn')
.on('click', function() {
$.FileDialog({
accept: ".xml",
cancelButton: "Close",
dragMessage: "Drop files here",
dropheight: 400,
errorMessage: "An error occurred while loading file",
multiple: false,
okButton: "OK",
readAs: "DataURL",
removeMessage: "Remove file",
title: "Load file"
});
})
.on('files.bs.filedialog', function(ev) {
var files_list = ev.files;
handleFileSelect(files_list);
})
.on('cancel.bs.filedialog', function(ev) {
handleFileSelect(ev);
});
}
},
setElementActions: {
value: function setElementActions(){
this.setFileBrowser();
this.startTour();
this.addBindings();
this.addEventHandlers();
this.tradeInfo.show();
}
},
addBindings: {
value: function addBindings(){
var that = this;
var stop = function stop(e) {
if (e) {
e.preventDefault();
}
that.bot.stop();
};
var logout = function logout() {
account.logoutAllTokens(function(){
that.updateTokenList();
that.observer.emit('ui.log.info', that.translator.translateText('Logged you out!'));
});
};
$('#stopButton')
.click(stop)
.hide();
$('.panelExitButton')
.click(function () {
$(this)
.parent()
.hide();
});
$('.panel')
.hide();
$('.panel')
.drags();
$('#chart')
.mousedown(function (e) { // keep mousedown on the chart from triggering the panel's draggable behavior
e.stopPropagation();
});
$('table')
.mousedown(function (e) { // keep mousedown on tables from triggering the panel's draggable behavior
e.stopPropagation();
});
$('#saveXml')
.click(function (e) {
that.blockly.saveXml();
});
$('#undo')
.click(function (e) {
that.blockly.undo();
});
$('#redo')
.click(function (e) {
that.blockly.redo();
});
$('#showSummary')
.click(function (e) {
$('#summaryPanel')
.show();
});
$('#loadXml')
.click(function (e) {
$('#fileBrowser')
.show();
});
$('#logout')
.click(function (e) {
logout();
$('.logout').hide();
});
$('#runButton')
.click(function (e) {
$('#stopButton').show();
$('#runButton').hide();
that.blockly.run();
});
$('#resetButton')
.click(function (e) {
that.blockly.loadBlocks();
});
$('#login')
.bind('click.login', function(e){
document.location = 'https://oauth.binary.com/oauth2/authorize?app_id=' + storageManager.get('appId') + '&l=' + that.translator.getLanguage().toUpperCase();
})
.text('Log in');
$(document).keydown(function(e) {
switch(e.which) {
case 189: // -
if ( e.ctrlKey ) {
that.blockly.zoomOnPlusMinus(false);
}
break;
case 187: // +
if ( e.ctrlKey ) {
that.blockly.zoomOnPlusMinus(true);
}
break;
case 39: // right
if (that.activeTour) {
that.activeTour.next();
} else {
return;
}
break;
default: return; // exit this handler for other keys
}
e.preventDefault(); // prevent the default action (scroll / move caret)
});
}
},
updateChart: {
value: function updateChart(info) {
var that = this;
var chartOptions = {
type: this.chartType,
theme: 'light',
typeChange: function ( type ) {
that.chartType = type;
}
};
if ( this.chartType === 'candlestick' ) {
console.log(info.candles);
chartOptions.ticks = info.candles;
} else {
chartOptions.ticks = info.ticks;
}
if (this.latestOpenContract) {
chartOptions.contract = this.latestOpenContract;
if (this.latestOpenContract.is_sold) {
delete this.latestOpenContract;
}
}
chartOptions.pipSize = Number(Number(info.pip)
.toExponential()
.substring(3));
if (!this.chart) {
this.chart = Chart('chart', chartOptions);
} else {
this.chart.updateChart(chartOptions);
}
}
},
addEventHandlers: {
value: function addEventHandlers() {
var that = this;
this.observer.register('api.error', function(error){
if (error.code === 'InvalidToken'){
storageManager.removeAllTokens();
that.updateTokenList();
}
that.bot.stop();
that.observer.emit('ui.error', error);
});
this.observer.register('bot.stop', function(tradeInfo){
$('#runButton').show();
$('#stopButton').hide();
});
this.observer.register('bot.tradeInfo', function(tradeInfo){
_.extend(that.tradeInfo.tradeInfo, tradeInfo);
that.tradeInfo.update();
});
this.observer.register('bot.tradeUpdate', function(contract){
that.latestOpenContract = contract;
});
this.observer.register('bot.finish', function(contract){
that.tradeInfo.add(contract);
});
this.observer.register('bot.tickUpdate', function(info){
that.updateChart(info);
});
}
}
});
module.exports = View;
| Removed typechange from the chart update function
| src/botPage/view/index.js | Removed typechange from the chart update function | <ide><path>rc/botPage/view/index.js
<ide> var chartOptions = {
<ide> type: this.chartType,
<ide> theme: 'light',
<del> typeChange: function ( type ) {
<del> that.chartType = type;
<del> }
<ide> };
<ide> if ( this.chartType === 'candlestick' ) {
<del> console.log(info.candles);
<ide> chartOptions.ticks = info.candles;
<ide> } else {
<ide> chartOptions.ticks = info.ticks; |
|
Java | mit | a1e04a2e36318b38bc782f9744f225eaa11fc3d8 | 0 | TakayukiHoshi1984/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android | /*
DConnectProfile.java
Copyright (c) 2014 NTT DOCOMO,INC.
Released under the MIT license
http://opensource.org/licenses/mit-license.php
*/
package org.deviceconnect.android.profile;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import org.deviceconnect.android.event.Event;
import org.deviceconnect.android.message.DConnectMessageService;
import org.deviceconnect.android.message.MessageUtils;
import org.deviceconnect.android.profile.api.DConnectApi;
import org.deviceconnect.android.profile.spec.DConnectApiSpec;
import org.deviceconnect.android.service.DConnectService;
import org.deviceconnect.message.DConnectMessage;
import org.deviceconnect.message.intent.message.IntentDConnectMessage;
import org.deviceconnect.profile.DConnectProfileConstants;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
* DConnect プロファイルクラス.
* @author NTT DOCOMO, INC.
*/
public abstract class DConnectProfile implements DConnectProfileConstants {
/** バッファサイズを定義. */
private static final int BUF_SIZE = 4096;
/**
* コンテキスト.
*/
private Context mContext;
/**
* ロガー.
*/
protected final Logger mLogger = Logger.getLogger("org.deviceconnect.dplugin");
/**
* サポートするAPI.
*/
protected final Map<ApiIdentifier, DConnectApi> mApis
= new HashMap<ApiIdentifier, DConnectApi>();
public List<DConnectApi> getApiList() {
List<DConnectApi> list = new ArrayList<DConnectApi>();
list.addAll(mApis.values());
return list;
}
public DConnectApi findApi(final Intent request) {
String action = request.getAction();
DConnectApiSpec.Method method = DConnectApiSpec.Method.fromAction(action);
if (method == null) {
return null;
}
String path = getApiPath(getProfile(request), getInterface(request), getAttribute(request));
return findApi(path, method);
}
public DConnectApi findApi(final String path, final DConnectApiSpec.Method method) {
return mApis.get(new ApiIdentifier(path, method));
}
public void addApi(final DConnectApi api) {
mApis.put(new ApiIdentifier(getApiPath(api), api.getMethod()), api);
}
public void removeApi(final DConnectApi api) {
mApis.remove(new ApiIdentifier(getApiPath(api), api.getMethod()));
}
private String getApiPath(final DConnectApi api) {
return getApiPath(getProfileName(), api.getInterface(), api.getAttribute());
}
private String getApiPath(final String profileName, final String interfaceName,
final String attributeName) {
StringBuilder path = new StringBuilder();
path.append("/");
path.append(profileName);
if (interfaceName != null) {
path.append("/");
path.append(interfaceName);
}
if (attributeName != null) {
path.append("/");
path.append(attributeName);
}
return path.toString();
}
/**
* プロファイル名を取得する.
*
* @return プロファイル名
*/
public abstract String getProfileName();
/**
* RESPONSEメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @param service サービス
* @return レスポンスパラメータを送信するか否か
*/
public boolean onRequest(final Intent request, final Intent response, final DConnectService service) {
DConnectApi api = findApi(request);
if (api != null) {
return api.onRequest(request, response, service);
} else {
return onRequest(request, response);
}
}
/**
* RESPONSEメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
public boolean onRequest(final Intent request, final Intent response) {
String action = request.getAction();
boolean send = true;
try {
if (IntentDConnectMessage.ACTION_GET.equals(action)) {
send = onGetRequest(request, response);
} else if (IntentDConnectMessage.ACTION_POST.equals(action)) {
send = onPostRequest(request, response);
} else if (IntentDConnectMessage.ACTION_PUT.equals(action)) {
send = onPutRequest(request, response);
} else if (IntentDConnectMessage.ACTION_DELETE.equals(action)) {
send = onDeleteRequest(request, response);
} else {
mLogger.warning("Unknown action. action=" + action);
MessageUtils.setNotSupportActionError(response);
}
} catch (Exception e) {
mLogger.severe("Exception occurred in the profile. " + e.getMessage());
MessageUtils.setUnknownError(response, e.getMessage());
}
return send;
}
/**
* GETメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onGetRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* POSTメソッドハンドラー.
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onPostRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* PUTメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onPutRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* DELETEメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onDeleteRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* コンテキストの設定する.
*
* @param context コンテキスト
*/
public void setContext(final Context context) {
mContext = context;
}
/**
* コンテキストの取得する.
*
* @return コンテキスト
*/
public Context getContext() {
return mContext;
}
/**
* 指定されたオブジェクトがStringか指定されたNumberクラスかを判定し、指定されたNumberクラスへ変換する.
*
* @param o 値
* @param clazz 型情報
* @param <T> ナンバークラスの型。判定出来るのは {@link Byte}、{@link Short}、{@link Integer}、
* {@link Long}、{@link Float}、{@link Double} のみ。
* @return 指定されたナンバークラスのオブジェクト。変換に失敗した場合はnullを返す。
*/
@SuppressWarnings("unchecked")
private static <T extends Number> Number valueOf(final Object o, final Class<T> clazz) {
if (o == null) {
return null;
}
Number result = null;
if (o instanceof String) {
try {
if (Integer.class.equals(clazz)) {
result = Integer.valueOf((String) o);
} else if (Long.class.equals(clazz)) {
result = Long.valueOf((String) o);
} else if (Double.class.equals(clazz)) {
result = Double.valueOf((String) o);
} else if (Byte.class.equals(clazz)) {
result = Byte.valueOf((String) o);
} else if (Short.class.equals(clazz)) {
result = Short.valueOf((String) o);
} else if (Float.class.equals(clazz)) {
result = Float.valueOf((String) o);
}
} catch (NumberFormatException e) {
result = null;
}
} else if (o.getClass().equals(clazz)) {
result = (T) o;
}
return result;
}
/**
* 指定されたオブジェクトがStringかIntegerかを判定し、Integerへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Integer parseInteger(final Object o) {
Integer res = (Integer) valueOf(o, Integer.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかIntegerかを判定し、Integerへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Integer parseInteger(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Integer res = parseInteger(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかLongかを判定し、Longへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Long parseLong(final Object o) {
Long res = (Long) valueOf(o, Long.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかLongかを判定し、Longへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Long parseLong(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Long res = parseLong(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかDoubleかを判定し、Doubleへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Double parseDouble(final Object o) {
Double res = (Double) valueOf(o, Double.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかDoubleかを判定し、Doubleへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Double parseDouble(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Double res = parseDouble(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかFloatかを判定し、Floatへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Float parseFloat(final Object o) {
Float res = (Float) valueOf(o, Float.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかFloatかを判定し、Floatへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Float parseFloat(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Float res = parseFloat(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかByteかを判定し、Byteへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Byte parseByte(final Object o) {
Byte res = (Byte) valueOf(o, Byte.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかByteかを判定し、Byteへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Byte parseByte(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Byte res = parseByte(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかShortかを判定し、Shortへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Short parseShort(final Object o) {
Short res = (Short) valueOf(o, Short.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかShortかを判定し、Shortへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Short parseShort(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Short res = parseShort(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかBooleanかを判定し、Booleanへ変換する.
* Stringの場合は、"true"の場合true、"false"の場合falseを返す。その他はnullを返す。
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Boolean parseBoolean(final Object o) {
if (o instanceof String) {
if (o.equals("true")) {
return Boolean.TRUE;
} else if (o.equals("false")) {
return Boolean.FALSE;
}
} else if (o instanceof Boolean) {
return (Boolean) o;
}
return null;
}
/**
* Intentの指定されたパラメータがStringかBooleanかを判定し、Booleanへ変換する.
* Stringの場合は、"true"の場合true、"false"の場合falseを返す。その他はnullを返す。
*
* @param intent インテント
* @param key キー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Boolean parseBoolean(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
return parseBoolean(b.get(key));
}
/**
* リクエストからサービスIDを取得する.
*
* @param request リクエストパラメータ
* @return サービスID。無い場合はnullを返す。
*/
public static String getServiceID(final Intent request) {
String serviceId = request.getStringExtra(PARAM_SERVICE_ID);
return serviceId;
}
/**
* メッセージにサービスIDを設定する.
*
* @param message メッセージパラメータ
* @param serviceId サービスID
*/
public static void setServiceID(final Intent message, final String serviceId) {
message.putExtra(PARAM_SERVICE_ID, serviceId);
}
/**
* リクエストからAPI名を取得する.
*
* @param request リクエストパラメータ
* @return API名。無い場合はnullを返す。
*/
public static String getApi(final Intent request) {
String api = request.getStringExtra(DConnectMessage.EXTRA_API);
return api;
}
/**
* メッセージにAPI名を設定する.
*
* @param message メッセージパラメータ
* @param api API名
*/
public static void setApi(final Intent message, final String api) {
message.putExtra(DConnectMessage.EXTRA_API, api);
}
/**
* リクエストからプロファイル名を取得する.
*
* @param request リクエストパラメータ
* @return プロファイル名。無い場合はnullを返す。
*/
public static String getProfile(final Intent request) {
String profile = request.getExtras().getString(DConnectMessage.EXTRA_PROFILE);
return profile;
}
/**
* メッセージにプロファイル名を設定する.
*
* @param message メッセージパラメータ
* @param profile プロファイル名
*/
public static void setProfile(final Intent message, final String profile) {
message.putExtra(DConnectMessage.EXTRA_PROFILE, profile);
}
/**
* リクエストからインターフェース名を取得する.
*
* @param request リクエストパラメータsetProfile
* @return インターフェース。無い場合はnullを返す。
*/
public static String getInterface(final Intent request) {
String inter = request.getExtras().getString(DConnectMessage.EXTRA_INTERFACE);
return inter;
}
/**
* メッセージにインターフェース名を設定する.
*
* @param message メッセージパラメータ
* @param inter インターフェース名
*/
public static void setInterface(final Intent message, final String inter) {
message.putExtra(DConnectMessage.EXTRA_INTERFACE, inter);
}
/**
* リクエストから属性名を取得する.
*
* @param request リクエストパラメータ
* @return 属性名。無い場合はnullを返す。
*/
public static String getAttribute(final Intent request) {
String attribute = request.getExtras().getString(DConnectMessage.EXTRA_ATTRIBUTE);
return attribute;
}
/**
* メッセージに属性名を設定する.
*
* @param message メッセージパラメータ
* @param attribute コールバック名
*/
public static void setAttribute(final Intent message, final String attribute) {
message.putExtra(DConnectMessage.EXTRA_ATTRIBUTE, attribute);
}
/**
* レスポンス結果を設定する.
*
* @param response レスポンスパラメータ
* @param result レスポンス結果
*/
public static void setResult(final Intent response, final int result) {
response.putExtra(DConnectMessage.EXTRA_RESULT, result);
}
/**
* レスポンス結果を取得する.
*
* @param response レスポンスパラメータ
* @return レスポンス結果
*/
public static int getResult(final Intent response) {
int result = response.getIntExtra(DConnectMessage.EXTRA_RESULT, -1);
return result;
}
/**
* リクエストからセッションキーを取得する.
*
* @param request リクエストパラメータ
* @return セッションキー。無い場合はnullを返す。
*/
public static String getSessionKey(final Intent request) {
String sessionKey = request.getStringExtra(PARAM_SESSION_KEY);
return sessionKey;
}
/**
* メッセージにセッションキーを設定する.
*
* @param message メッセージパラメータ
* @param sessionKey セッションキー
*/
public static void setSessionKey(final Intent message, final String sessionKey) {
message.putExtra(PARAM_SESSION_KEY, sessionKey);
}
/**
* リクエストからアクセストークンを取得する.
*
* @param request リクエストパラメータ
* @return アクセストークン。無い場合はnullを返す。
*/
public static String getAccessToken(final Intent request) {
String accessToken = request.getStringExtra(DConnectMessage.EXTRA_ACCESS_TOKEN);
return accessToken;
}
/**
* メッセージにアクセストークンを設定する.
*
* @param message メッセージパラメータ
* @param accessToken アクセストークン
*/
public static void setAccessToken(final Intent message, final String accessToken) {
message.putExtra(DConnectMessage.EXTRA_ACCESS_TOKEN, accessToken);
}
/**
* リクエストからDeviceConnectManagerのバージョン名を取得する.
*
* @param request リクエストパラメータ
* @return DeviceConnectManagerのバージョン名。無い場合はnullを返す。
*/
public static String getVersion(final Intent request) {
String version = request.getStringExtra(DConnectMessage.EXTRA_VERSION);
return version;
}
/**
* リクエストからDeviceConnectManagerのバージョン名を設定する.
*
* @param message メッセージパラメータ
* @param version DeviceConnectManagerのバージョン名
*/
public static void setVersion(final Intent message, final String version) {
message.putExtra(DConnectMessage.EXTRA_VERSION, version);
}
/**
* リクエストからDeviceConnectManagerのアプリ名を取得する.
*
* @param request リクエストパラメータ
* @return DeviceConnectManagerのアプリ名。無い場合はnullを返す。
*/
public static String getProduct(final Intent request) {
String product = request.getStringExtra(DConnectMessage.EXTRA_PRODUCT);
return product;
}
/**
* リクエストからDeviceConnectManagerのアプリ名を設定する.
*
* @param message メッセージパラメータ
* @param product DeviceConnectManagerのアプリ名
*/
public static void setProduct(final Intent message, final String product) {
message.putExtra(DConnectMessage.EXTRA_PRODUCT, product);
}
/**
* レスポンスの結果として非サポートエラーを設定する.
*
* @param response レスポンスパラメータ
*/
public static void setUnsupportedError(final Intent response) {
MessageUtils.setNotSupportAttributeError(response);
}
/**
* レスポンスにリクエストコードを設定する.
*
* @param response レスポンスパラメータ
* @param requestCode リクエストコード
*/
public static void setRequestCode(final Intent response, final int requestCode) {
response.putExtra(DConnectMessage.EXTRA_REQUEST_CODE, requestCode);
}
/**
* リクエストからリクエストコードを取得する.
*
* @param request リクエストパラメータ
* @return リクエストコード
*/
public static int getRequestCode(final Intent request) {
return request.getIntExtra(DConnectMessage.EXTRA_REQUEST_CODE, Integer.MIN_VALUE);
}
/**
* レスポンスを返却します.
* @param response レスポンス
*/
protected final void sendResponse(final Intent response) {
((DConnectMessageService) getContext()).sendResponse(response);
}
/**
* イベントを送信します.
* @param event イベント
* @param accessToken アクセストークン
* @return 送信成功の場合true、アクセストークンエラーの場合はfalseを返す。
*/
protected final boolean sendEvent(final Intent event, final String accessToken) {
return ((DConnectMessageService) getContext()).sendEvent(event, accessToken);
}
/**
* イベントを送信します.
* @param event イベント
* @param bundle パラメータ
* @return 送信成功の場合true、アクセストークンエラーの場合はfalseを返す。
*/
protected final boolean sendEvent(final Event event, final Bundle bundle) {
return ((DConnectMessageService) getContext()).sendEvent(event, bundle);
}
/**
* コンテンツデータを取得する.
*
* @param uri URI
* @return コンテンツデータ
*/
protected final byte[] getContentData(final String uri) {
if (uri == null) {
return null;
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
InputStream in = null;
byte[] buf = new byte[BUF_SIZE];
int len;
try {
ContentResolver r = getContext().getContentResolver();
in = r.openInputStream(Uri.parse(uri));
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
return out.toByteArray();
} catch (IOException e) {
return null;
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
private static class ApiIdentifier {
private final String mPath;
private final DConnectApiSpec.Method mMethod;
public ApiIdentifier(final String path, final DConnectApiSpec.Method method) {
if (path == null) {
throw new IllegalArgumentException("path is null.");
}
if (method == null) {
throw new IllegalArgumentException("method is null.");
}
mPath = path;
mMethod = method;
}
public ApiIdentifier(final String path, final String method) {
this(path, DConnectApiSpec.Method.parse(method));
}
@Override
public int hashCode() {
return (mPath + mMethod.getName()).hashCode();
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ApiIdentifier)) {
return false;
}
ApiIdentifier that = ((ApiIdentifier) o);
return mPath.equals(that.mPath) && mMethod == that.mMethod;
}
}
}
| dConnectDevicePlugin/dConnectDevicePluginSDK/dconnect-device-plugin-sdk/src/main/java/org/deviceconnect/android/profile/DConnectProfile.java | /*
DConnectProfile.java
Copyright (c) 2014 NTT DOCOMO,INC.
Released under the MIT license
http://opensource.org/licenses/mit-license.php
*/
package org.deviceconnect.android.profile;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import org.deviceconnect.android.event.Event;
import org.deviceconnect.android.message.DConnectMessageService;
import org.deviceconnect.android.message.MessageUtils;
import org.deviceconnect.android.profile.api.DConnectApi;
import org.deviceconnect.android.profile.spec.DConnectApiSpec;
import org.deviceconnect.android.service.DConnectService;
import org.deviceconnect.message.DConnectMessage;
import org.deviceconnect.message.intent.message.IntentDConnectMessage;
import org.deviceconnect.profile.DConnectProfileConstants;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
* DConnect プロファイルクラス.
* @author NTT DOCOMO, INC.
*/
public abstract class DConnectProfile implements DConnectProfileConstants {
/** バッファサイズを定義. */
private static final int BUF_SIZE = 4096;
/**
* コンテキスト.
*/
private Context mContext;
/**
* ロガー.
*/
protected final Logger mLogger = Logger.getLogger("org.deviceconnect.dplugin");
/**
* サポートするAPI.
*/
protected final Map<ApiIdentifier, DConnectApi> mApis
= new HashMap<ApiIdentifier, DConnectApi>();
public List<DConnectApi> getApiList() {
List<DConnectApi> list = new ArrayList<DConnectApi>();
for (DConnectApi api : mApis.values()) {
list.add(api);
}
return list;
}
public DConnectApi findApi(final Intent request) {
String action = request.getAction();
DConnectApiSpec.Method method = DConnectApiSpec.Method.fromAction(action);
if (method == null) {
return null;
}
String path = getApiPath(getProfile(request), getInterface(request), getAttribute(request));
return findApi(path, method);
}
public DConnectApi findApi(final String path, final DConnectApiSpec.Method method) {
return mApis.get(new ApiIdentifier(path, method));
}
public void addApi(final DConnectApi api) {
mApis.put(new ApiIdentifier(getApiPath(api), api.getMethod()), api);
}
public void removeApi(final DConnectApi api) {
mApis.remove(new ApiIdentifier(getApiPath(api), api.getMethod()));
}
private String getApiPath(final DConnectApi api) {
return getApiPath(getProfileName(), api.getInterface(), api.getAttribute());
}
private String getApiPath(final String profileName, final String interfaceName,
final String attributeName) {
StringBuilder path = new StringBuilder();
path.append("/");
path.append(profileName);
if (interfaceName != null) {
path.append("/");
path.append(interfaceName);
}
if (attributeName != null) {
path.append("/");
path.append(attributeName);
}
return path.toString();
}
/**
* プロファイル名を取得する.
*
* @return プロファイル名
*/
public abstract String getProfileName();
/**
* RESPONSEメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @param service サービス
* @return レスポンスパラメータを送信するか否か
*/
public boolean onRequest(final Intent request, final Intent response, final DConnectService service) {
DConnectApi api = findApi(request);
if (api != null) {
return api.onRequest(request, response, service);
} else {
return onRequest(request, response);
}
}
/**
* RESPONSEメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
public boolean onRequest(final Intent request, final Intent response) {
String action = request.getAction();
boolean send = true;
try {
if (IntentDConnectMessage.ACTION_GET.equals(action)) {
send = onGetRequest(request, response);
} else if (IntentDConnectMessage.ACTION_POST.equals(action)) {
send = onPostRequest(request, response);
} else if (IntentDConnectMessage.ACTION_PUT.equals(action)) {
send = onPutRequest(request, response);
} else if (IntentDConnectMessage.ACTION_DELETE.equals(action)) {
send = onDeleteRequest(request, response);
} else {
mLogger.warning("Unknown action. action=" + action);
MessageUtils.setNotSupportActionError(response);
}
} catch (Exception e) {
mLogger.severe("Exception occurred in the profile. " + e.getMessage());
MessageUtils.setUnknownError(response, e.getMessage());
}
return send;
}
/**
* GETメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onGetRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* POSTメソッドハンドラー.
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onPostRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* PUTメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onPutRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* DELETEメソッドハンドラー.<br>
* リクエストパラメータに応じてデバイスのサービスを提供し、その結果をレスポンスパラメータに格納する。
* レスポンスパラメータの送信準備が出来た場合は返り値にtrueを指定する事。
* 送信準備ができていない場合は、返り値にfalseを指定し、スレッドを立ち上げてそのスレッドで最終的にレスポンスパラメータの送信を行う事。
*
* @param request リクエストパラメータ
* @param response レスポンスパラメータ
* @return レスポンスパラメータを送信するか否か
* @deprecated
*/
protected boolean onDeleteRequest(final Intent request, final Intent response) {
MessageUtils.setNotSupportActionError(response);
return true;
}
/**
* コンテキストの設定する.
*
* @param context コンテキスト
*/
public void setContext(final Context context) {
mContext = context;
}
/**
* コンテキストの取得する.
*
* @return コンテキスト
*/
public Context getContext() {
return mContext;
}
/**
* 指定されたオブジェクトがStringか指定されたNumberクラスかを判定し、指定されたNumberクラスへ変換する.
*
* @param o 値
* @param clazz 型情報
* @param <T> ナンバークラスの型。判定出来るのは {@link Byte}、{@link Short}、{@link Integer}、
* {@link Long}、{@link Float}、{@link Double} のみ。
* @return 指定されたナンバークラスのオブジェクト。変換に失敗した場合はnullを返す。
*/
@SuppressWarnings("unchecked")
private static <T extends Number> Number valueOf(final Object o, final Class<T> clazz) {
if (o == null) {
return null;
}
Number result = null;
if (o instanceof String) {
try {
if (Integer.class.equals(clazz)) {
result = Integer.valueOf((String) o);
} else if (Long.class.equals(clazz)) {
result = Long.valueOf((String) o);
} else if (Double.class.equals(clazz)) {
result = Double.valueOf((String) o);
} else if (Byte.class.equals(clazz)) {
result = Byte.valueOf((String) o);
} else if (Short.class.equals(clazz)) {
result = Short.valueOf((String) o);
} else if (Float.class.equals(clazz)) {
result = Float.valueOf((String) o);
}
} catch (NumberFormatException e) {
result = null;
}
} else if (o.getClass().equals(clazz)) {
result = (T) o;
}
return result;
}
/**
* 指定されたオブジェクトがStringかIntegerかを判定し、Integerへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Integer parseInteger(final Object o) {
Integer res = (Integer) valueOf(o, Integer.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかIntegerかを判定し、Integerへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Integer parseInteger(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Integer res = parseInteger(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかLongかを判定し、Longへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Long parseLong(final Object o) {
Long res = (Long) valueOf(o, Long.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかLongかを判定し、Longへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Long parseLong(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Long res = parseLong(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかDoubleかを判定し、Doubleへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Double parseDouble(final Object o) {
Double res = (Double) valueOf(o, Double.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかDoubleかを判定し、Doubleへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Double parseDouble(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Double res = parseDouble(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかFloatかを判定し、Floatへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Float parseFloat(final Object o) {
Float res = (Float) valueOf(o, Float.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかFloatかを判定し、Floatへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Float parseFloat(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Float res = parseFloat(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかByteかを判定し、Byteへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Byte parseByte(final Object o) {
Byte res = (Byte) valueOf(o, Byte.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかByteかを判定し、Byteへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Byte parseByte(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Byte res = parseByte(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかShortかを判定し、Shortへ変換する.
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Short parseShort(final Object o) {
Short res = (Short) valueOf(o, Short.class);
return res;
}
/**
* Intentの指定されたパラメータがStringかShortかを判定し、Shortへ変換する.
*
* @param intent インテント
* @param key パラメータキー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Short parseShort(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
Short res = parseShort(b.get(key));
return res;
}
/**
* 指定されたオブジェクトがStringかBooleanかを判定し、Booleanへ変換する.
* Stringの場合は、"true"の場合true、"false"の場合falseを返す。その他はnullを返す。
*
* @param o 値
* @return 変換後の値。変換に失敗した場合はnullを返す。
*/
public static Boolean parseBoolean(final Object o) {
if (o instanceof String) {
if (o.equals("true")) {
return Boolean.TRUE;
} else if (o.equals("false")) {
return Boolean.FALSE;
}
} else if (o instanceof Boolean) {
return (Boolean) o;
}
return null;
}
/**
* Intentの指定されたパラメータがStringかBooleanかを判定し、Booleanへ変換する.
* Stringの場合は、"true"の場合true、"false"の場合falseを返す。その他はnullを返す。
*
* @param intent インテント
* @param key キー
* @return 変換後の値。変換に失敗した場合、またはパラメータが無い場合はnullを返す。
*/
public static Boolean parseBoolean(final Intent intent, final String key) {
Bundle b = intent.getExtras();
if (b == null) {
return null;
}
return parseBoolean(b.get(key));
}
/**
* リクエストからサービスIDを取得する.
*
* @param request リクエストパラメータ
* @return サービスID。無い場合はnullを返す。
*/
public static String getServiceID(final Intent request) {
String serviceId = request.getStringExtra(PARAM_SERVICE_ID);
return serviceId;
}
/**
* メッセージにサービスIDを設定する.
*
* @param message メッセージパラメータ
* @param serviceId サービスID
*/
public static void setServiceID(final Intent message, final String serviceId) {
message.putExtra(PARAM_SERVICE_ID, serviceId);
}
/**
* リクエストからAPI名を取得する.
*
* @param request リクエストパラメータ
* @return API名。無い場合はnullを返す。
*/
public static String getApi(final Intent request) {
String api = request.getStringExtra(DConnectMessage.EXTRA_API);
return api;
}
/**
* メッセージにAPI名を設定する.
*
* @param message メッセージパラメータ
* @param api API名
*/
public static void setApi(final Intent message, final String api) {
message.putExtra(DConnectMessage.EXTRA_API, api);
}
/**
* リクエストからプロファイル名を取得する.
*
* @param request リクエストパラメータ
* @return プロファイル名。無い場合はnullを返す。
*/
public static String getProfile(final Intent request) {
String profile = request.getExtras().getString(DConnectMessage.EXTRA_PROFILE);
return profile;
}
/**
* メッセージにプロファイル名を設定する.
*
* @param message メッセージパラメータ
* @param profile プロファイル名
*/
public static void setProfile(final Intent message, final String profile) {
message.putExtra(DConnectMessage.EXTRA_PROFILE, profile);
}
/**
* リクエストからインターフェース名を取得する.
*
* @param request リクエストパラメータsetProfile
* @return インターフェース。無い場合はnullを返す。
*/
public static String getInterface(final Intent request) {
String inter = request.getExtras().getString(DConnectMessage.EXTRA_INTERFACE);
return inter;
}
/**
* メッセージにインターフェース名を設定する.
*
* @param message メッセージパラメータ
* @param inter インターフェース名
*/
public static void setInterface(final Intent message, final String inter) {
message.putExtra(DConnectMessage.EXTRA_INTERFACE, inter);
}
/**
* リクエストから属性名を取得する.
*
* @param request リクエストパラメータ
* @return 属性名。無い場合はnullを返す。
*/
public static String getAttribute(final Intent request) {
String attribute = request.getExtras().getString(DConnectMessage.EXTRA_ATTRIBUTE);
return attribute;
}
/**
* メッセージに属性名を設定する.
*
* @param message メッセージパラメータ
* @param attribute コールバック名
*/
public static void setAttribute(final Intent message, final String attribute) {
message.putExtra(DConnectMessage.EXTRA_ATTRIBUTE, attribute);
}
/**
* レスポンス結果を設定する.
*
* @param response レスポンスパラメータ
* @param result レスポンス結果
*/
public static void setResult(final Intent response, final int result) {
response.putExtra(DConnectMessage.EXTRA_RESULT, result);
}
/**
* レスポンス結果を取得する.
*
* @param response レスポンスパラメータ
* @return レスポンス結果
*/
public static int getResult(final Intent response) {
int result = response.getIntExtra(DConnectMessage.EXTRA_RESULT, -1);
return result;
}
/**
* リクエストからセッションキーを取得する.
*
* @param request リクエストパラメータ
* @return セッションキー。無い場合はnullを返す。
*/
public static String getSessionKey(final Intent request) {
String sessionKey = request.getStringExtra(PARAM_SESSION_KEY);
return sessionKey;
}
/**
* メッセージにセッションキーを設定する.
*
* @param message メッセージパラメータ
* @param sessionKey セッションキー
*/
public static void setSessionKey(final Intent message, final String sessionKey) {
message.putExtra(PARAM_SESSION_KEY, sessionKey);
}
/**
* リクエストからアクセストークンを取得する.
*
* @param request リクエストパラメータ
* @return アクセストークン。無い場合はnullを返す。
*/
public static String getAccessToken(final Intent request) {
String accessToken = request.getStringExtra(DConnectMessage.EXTRA_ACCESS_TOKEN);
return accessToken;
}
/**
* メッセージにアクセストークンを設定する.
*
* @param message メッセージパラメータ
* @param accessToken アクセストークン
*/
public static void setAccessToken(final Intent message, final String accessToken) {
message.putExtra(DConnectMessage.EXTRA_ACCESS_TOKEN, accessToken);
}
/**
* リクエストからDeviceConnectManagerのバージョン名を取得する.
*
* @param request リクエストパラメータ
* @return DeviceConnectManagerのバージョン名。無い場合はnullを返す。
*/
public static String getVersion(final Intent request) {
String version = request.getStringExtra(DConnectMessage.EXTRA_VERSION);
return version;
}
/**
* リクエストからDeviceConnectManagerのバージョン名を設定する.
*
* @param message メッセージパラメータ
* @param version DeviceConnectManagerのバージョン名
*/
public static void setVersion(final Intent message, final String version) {
message.putExtra(DConnectMessage.EXTRA_VERSION, version);
}
/**
* リクエストからDeviceConnectManagerのアプリ名を取得する.
*
* @param request リクエストパラメータ
* @return DeviceConnectManagerのアプリ名。無い場合はnullを返す。
*/
public static String getProduct(final Intent request) {
String product = request.getStringExtra(DConnectMessage.EXTRA_PRODUCT);
return product;
}
/**
* リクエストからDeviceConnectManagerのアプリ名を設定する.
*
* @param message メッセージパラメータ
* @param product DeviceConnectManagerのアプリ名
*/
public static void setProduct(final Intent message, final String product) {
message.putExtra(DConnectMessage.EXTRA_PRODUCT, product);
}
/**
* レスポンスの結果として非サポートエラーを設定する.
*
* @param response レスポンスパラメータ
*/
public static void setUnsupportedError(final Intent response) {
MessageUtils.setNotSupportAttributeError(response);
}
/**
* レスポンスにリクエストコードを設定する.
*
* @param response レスポンスパラメータ
* @param requestCode リクエストコード
*/
public static void setRequestCode(final Intent response, final int requestCode) {
response.putExtra(DConnectMessage.EXTRA_REQUEST_CODE, requestCode);
}
/**
* リクエストからリクエストコードを取得する.
*
* @param request リクエストパラメータ
* @return リクエストコード
*/
public static int getRequestCode(final Intent request) {
return request.getIntExtra(DConnectMessage.EXTRA_REQUEST_CODE, Integer.MIN_VALUE);
}
/**
* レスポンスを返却します.
* @param response レスポンス
*/
protected final void sendResponse(final Intent response) {
((DConnectMessageService) getContext()).sendResponse(response);
}
/**
* イベントを送信します.
* @param event イベント
* @param accessToken アクセストークン
* @return 送信成功の場合true、アクセストークンエラーの場合はfalseを返す。
*/
protected final boolean sendEvent(final Intent event, final String accessToken) {
return ((DConnectMessageService) getContext()).sendEvent(event, accessToken);
}
/**
* イベントを送信します.
* @param event イベント
* @param bundle パラメータ
* @return 送信成功の場合true、アクセストークンエラーの場合はfalseを返す。
*/
protected final boolean sendEvent(final Event event, final Bundle bundle) {
return ((DConnectMessageService) getContext()).sendEvent(event, bundle);
}
/**
* コンテンツデータを取得する.
*
* @param uri URI
* @return コンテンツデータ
*/
protected final byte[] getContentData(final String uri) {
if (uri == null) {
return null;
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
InputStream in = null;
byte[] buf = new byte[BUF_SIZE];
int len;
try {
ContentResolver r = getContext().getContentResolver();
in = r.openInputStream(Uri.parse(uri));
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
return out.toByteArray();
} catch (IOException e) {
return null;
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
private static class ApiIdentifier {
private final String mPath;
private final DConnectApiSpec.Method mMethod;
public ApiIdentifier(final String path, final DConnectApiSpec.Method method) {
if (path == null) {
throw new IllegalArgumentException("path is null.");
}
if (method == null) {
throw new IllegalArgumentException("method is null.");
}
mPath = path;
mMethod = method;
}
public ApiIdentifier(final String path, final String method) {
this(path, DConnectApiSpec.Method.parse(method));
}
@Override
public int hashCode() {
return (mPath + mMethod.getName()).hashCode();
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ApiIdentifier)) {
return false;
}
ApiIdentifier that = ((ApiIdentifier) o);
return mPath.equals(that.mPath) && mMethod == that.mMethod;
}
}
}
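/*
Hedged usage sketch (added for illustration, not part of the original source):
a device plug-in profile built on the helpers above might read the access
token and session key from an incoming request and push an event back out.
ExampleProfile, the onGetRequest hook, and the empty event payload are
assumptions for the example; only getAccessToken, getSessionKey, sendResponse
and sendEvent come from this class.

public class ExampleProfile extends DConnectProfile {
    protected boolean onGetRequest(final Intent request, final Intent response) {
        String accessToken = getAccessToken(request);  // may be null
        String sessionKey = getSessionKey(request);    // may be null
        // ... fill in the response extras here ...
        sendResponse(response);                        // hand the reply to the service
        if (accessToken != null) {
            Intent event = new Intent();               // event payload is illustrative
            sendEvent(event, accessToken);             // false would mean an access token error
        }
        return false;                                  // response already sent above
    }
}
*/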
| Refactoring.
| dConnectDevicePlugin/dConnectDevicePluginSDK/dconnect-device-plugin-sdk/src/main/java/org/deviceconnect/android/profile/DConnectProfile.java | Refactoring.
<ide>
<ide> public List<DConnectApi> getApiList() {
<ide> List<DConnectApi> list = new ArrayList<DConnectApi>();
<del> for (DConnectApi api : mApis.values()) {
<del> list.add(api);
<del> }
<add> list.addAll(mApis.values());
<ide> return list;
<ide> }
<ide> |
|
Java | unlicense | error: pathspec 'src/com/dydra/Statement.java' did not match any file(s) known to git
| ecb8fe8031fc437a4fd7b107d2129bc2896e3a52 | 1 | dydra/dydra.java,dydra/dydra.java | /* This is free and unencumbered software released into the public domain. */
package com.dydra;
/**
* Represents a statement stored in a Dydra.com repository.
*
* @see http://docs.dydra.com/sdk/java
*/
public class Statement implements Identifiable, Comparable<Statement> {
/**
* The statement identifier.
*/
public final Identifier id;
/**
* The statement's subject term.
*/
public Identifiable subject;
/**
* The statement's predicate term.
*/
public Identifiable predicate;
/**
* The statement's object term.
*/
public Identifiable object;
/**
* Constructs a statement from the given identifier.
*
* @param id a statement identifier
*/
public Statement(final Identifier id) {
this.id = id;
}
/**
* Constructs a statement from the given identifier and terms.
*
* @param id a statement identifier
* @param s the subject term
* @param p the predicate term
* @param o the object term
*/
public Statement(final Identifier id,
final Identifiable s, final Identifiable p, final Identifiable o) {
this.id = id;
this.subject = s;
this.predicate = p;
this.object = o;
}
/**
* Indicates whether some other object is "equal to" this one.
*
* @param other the object to compare this statement against
* @return <code>true</code> if the given object is equivalent to this
* statement, <code>false</code> otherwise
*/
@Override
public boolean equals(final Object other) {
return (other instanceof Statement) && equals((Statement)other);
}
/**
* Indicates whether another statement is equal to this one.
*
* @param other the statement to compare this statement against
* @return <code>true</code> if the given statement is equivalent to this
* statement, <code>false</code> otherwise
*/
public boolean equals(final Statement other) {
return this.toIdentifier().equals(other.toIdentifier());
}
/**
* Returns the hash code for this statement.
*
* @return a hash code value
*/
@Override
public int hashCode() {
return this.toIdentifier().hashCode();
}
/**
* Returns a string representation of this statement.
*
* @return a hexadecimal string of length <code>Identifier.LENGTH</code>
*/
@Override
public String toString() {
return this.toIdentifier().toString(); // TODO
}
/**
* Returns the identifier for this statement.
*
* @return a Dydra.com identifier
*/
public Identifier toIdentifier() {
return this.id;
}
/**
* Compares this statement to another statement.
*
* @param other the statement to compare this statement against
* @return a negative integer, zero, or a positive integer as this
* statement is less than, equal to, or greater than the given
* statement
*/
public int compareTo(final Statement other) {
return toIdentifier().compareTo(other.toIdentifier());
}
}
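/*
Hedged usage sketch (added for illustration, not part of the original file):
two statements built around the same Identifier compare as equal, share a
hash code, and sort together, whether or not the terms are attached. The
Identifier constructor shown here is an assumption about that class, and
subjectTerm, predicateTerm and objectTerm stand for any Identifiable values.

Identifier id = new Identifier("0123456789abcdef0123456789abcdef");
Statement bare = new Statement(id);
Statement full = new Statement(id, subjectTerm, predicateTerm, objectTerm);
assert bare.equals(full);                  // equality is identifier-based
assert bare.hashCode() == full.hashCode(); // so is the hash code
assert bare.compareTo(full) == 0;          // and the ordering
*/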
| src/com/dydra/Statement.java | Implemented the com.dydra.Statement class.
| src/com/dydra/Statement.java | Implemented the com.dydra.Statement class. | <ide><path>rc/com/dydra/Statement.java
<add>/* This is free and unencumbered software released into the public domain. */
<add>
<add>package com.dydra;
<add>
<add>/**
<add> * Represents a statement stored in a Dydra.com repository.
<add> *
<add> * @see http://docs.dydra.com/sdk/java
<add> */
<add>public class Statement implements Identifiable, Comparable<Statement> {
<add> /**
<add> * The statement identifier.
<add> */
<add> public final Identifier id;
<add>
<add> /**
<add> * The statement's subject term.
<add> */
<add> public Identifiable subject;
<add>
<add> /**
<add> * The statement's predicate term.
<add> */
<add> public Identifiable predicate;
<add>
<add> /**
<add> * The statement's object term.
<add> */
<add> public Identifiable object;
<add>
<add> /**
<add> * Constructs a statement from the given identifier.
<add> *
<add> * @param id a statement identifier
<add> */
<add> public Statement(final Identifier id) {
<add> this.id = id;
<add> }
<add>
<add> /**
<add> * Constructs a statement from the given identifier and terms.
<add> *
<add> * @param id a statement identifier
<add> * @param s the subject term
<add> * @param p the predicate term
<add> * @param o the object term
<add> */
<add> public Statement(final Identifier id,
<add> final Identifiable s, final Identifiable p, final Identifiable o) {
<add> this.id = id;
<add> this.subject = s;
<add> this.predicate = p;
<add> this.object = o;
<add> }
<add>
<add> /**
<add> * Indicates whether some other object is "equal to" this one.
<add> *
<add> * @param other the object to compare this statement against
<add> * @return <code>true</code> if the given object is equivalent to this
<add> * statement, <code>false</code> otherwise
<add> */
<add> @Override
<add> public boolean equals(final Object other) {
<add> return (other instanceof Statement) && equals((Statement)other);
<add> }
<add>
<add> /**
<add> * Indicates whether another statement is equal to this one.
<add> *
<add> * @param other the statement to compare this statement against
<add> * @return <code>true</code> if the given statement is equivalent to this
<add> * statement, <code>false</code> otherwise
<add> */
<add> public boolean equals(final Statement other) {
<add> return this.toIdentifier().equals(other.toIdentifier());
<add> }
<add>
<add> /**
<add> * Returns the hash code for this statement.
<add> *
<add> * @return a hash code value
<add> */
<add> @Override
<add> public int hashCode() {
<add> return this.toIdentifier().hashCode();
<add> }
<add>
<add> /**
<add> * Returns a string representation of this statement.
<add> *
<add> * @return a hexadecimal string of length <code>Identifier.LENGTH</code>
<add> */
<add> @Override
<add> public String toString() {
<add> return this.toIdentifier().toString(); // TODO
<add> }
<add>
<add> /**
<add> * Returns the identifier for this statement.
<add> *
<add> * @return a Dydra.com identifier
<add> */
<add> public Identifier toIdentifier() {
<add> return this.id;
<add> }
<add>
<add> /**
<add> * Compares this statement to another statement.
<add> *
<add> * @param other the statement to compare this statement against
<add> * @return a negative integer, zero, or a positive integer as this
<add> * statement is less than, equal to, or greater than the given
<add> * statement
<add> */
<add> public int compareTo(final Statement other) {
<add> return toIdentifier().compareTo(other.toIdentifier());
<add> }
<add>} |
|
Java | bsd-2-clause | 137372051353b9f88bdbaf52461863f8a1ec947b | 0 | biovoxxel/imagej,TehSAUCE/imagej,biovoxxel/imagej,TehSAUCE/imagej,TehSAUCE/imagej,biovoxxel/imagej | //
// NativeLibraryUtil.java
//
/*
ImageJ software for multidimensional image processing and analysis.
Copyright (c) 2010, ImageJDev.org.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names of the ImageJDev.org developers nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package imagej.nativelibrary;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import ij.IJ;
import loci.wapmx.nativeutils.jniloader.DefaultJniExtractor;
import loci.wapmx.nativeutils.jniloader.JniExtractor;
/**
*
* @author Aivar Grislis
*/
public class NativeLibraryUtil {
public static enum Architecture
{ UNKNOWN, LINUX_32, LINUX_64, WINDOWS_32, WINDOWS_64, OSX_32, OSX_64, OSX_PPC };
private static enum Processor
{ UNKNOWN, INTEL_32, INTEL_64, PPC };
private static Architecture s_architecture = Architecture.UNKNOWN;
private static final String DELIM = "/";
private static final String USER_TMPDIR = "java.library.tmpdir";
private static final String JAVA_TMPDIR = "java.io.tmpdir";
private static final String JAVA_PATH = "java.library.path";
private static final String SUN_PATH = "sun.boot.library.path";
private static final String USER_PATHS = "usr_paths";
private static final String CURRENT_DIRECTORY = ".";
private static boolean s_skipHack = false;
private static String s_writableDirectory = null;
/**
* Determines the underlying hardware platform and architecture.
*
* @return enumerated architecture value
*/
public static Architecture getArchitecture() {
if (Architecture.UNKNOWN == s_architecture) {
Processor processor = getProcessor();
if (Processor.UNKNOWN != processor) {
String name = System.getProperty("os.name").toLowerCase();
if (name.indexOf("nix") >= 0 || name.indexOf("nux") >= 0) {
if (Processor.INTEL_32 == processor) {
s_architecture = Architecture.LINUX_32;
}
else if (Processor.INTEL_64 == processor) {
s_architecture = Architecture.LINUX_64;
}
}
else if (name.indexOf("win") >= 0) {
if (Processor.INTEL_32 == processor) {
s_architecture = Architecture.WINDOWS_32;
}
else if (Processor.INTEL_64 == processor) {
s_architecture = Architecture.WINDOWS_64;
}
}
else if (name.indexOf("mac") >= 0) {
if (Processor.INTEL_32 == processor) {
s_architecture = Architecture.OSX_32;
}
else if (Processor.INTEL_64 == processor) {
s_architecture = Architecture.OSX_64;
}
else if (Processor.PPC == processor) {
s_architecture = Architecture.OSX_PPC;
}
}
}
}
return s_architecture;
}
/**
* Determines what processor is in use.
*
* @return enumerated processor value
*/
private static Processor getProcessor() {
Processor processor = Processor.UNKNOWN;
int bits;
// Note that this is actually the architecture of the installed JVM.
String arch = System.getProperty("os.arch").toLowerCase();
if (arch.indexOf("ppc") >= 0) {
processor = Processor.PPC;
}
else if (arch.indexOf("86") >= 0 || arch.indexOf("amd") >= 0) {
bits = 32;
if (arch.indexOf("64") >= 0) {
bits = 64;
}
processor = (32 == bits) ? Processor.INTEL_32 : Processor.INTEL_64;
}
return processor;
}
/**
* Returns the path to the native library.
*
* @return path
*/
public static String getPlatformLibraryPath() {
String path = "META-INF" + DELIM + "lib" + DELIM;
switch (getArchitecture()) {
case LINUX_32:
path += "i386-Linux-g++";
break;
case LINUX_64:
path += "x86_64-Linux-g++";
break;
case WINDOWS_32:
case WINDOWS_64:
path += "x86-Windows-msvc";
break;
case OSX_32:
path += "i386-MacOSX-gpp";
break;
case OSX_64:
path += "x86_64-MacOSX-gpp";
break;
case OSX_PPC:
path += "ppc-MacOSX-gpp";
break;
}
return path + DELIM;
}
/**
* Returns the full file name (without path) of the native library.
*
* @param libName
* @return file name
*/
public static String getPlatformLibraryName(String libName) {
libName = getVersionedLibraryName(libName);
String name = null;
switch (getArchitecture()) {
case LINUX_32:
case LINUX_64:
name = libName + ".so";
break;
case WINDOWS_32:
case WINDOWS_64:
name = libName + ".dll";
break;
case OSX_32:
case OSX_64:
name = "lib" + libName + ".dylib";
break;
}
return name;
}
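// Worked example (illustrative): on a 64-bit Linux JVM with libName "ijnative",
// getPlatformLibraryPath() yields "META-INF/lib/x86_64-Linux-g++/" and
// getPlatformLibraryName("ijnative") yields "ijnative-1.0-SNAPSHOT.so";
// on 64-bit OS X the same call yields "libijnative-1.0-SNAPSHOT.dylib".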
/**
* Returns the Maven-versioned file name of the native library.
*
* Note: With the Nar Plugin the class NarSystem.java is built for the
* client of this native library and takes care of this versioning
* hardcoding. If the client version is "-1.0-SNAPSHOT", the native
* library should be the same version.
*
* @param libName
* @return
*/
//TODO This shouldn't be hardcoded in the general-purpose utility class.
//TODO Couldn't we just get rid of this version label altogether?
public static String getVersionedLibraryName(String libName) {
return libName + "-1.0-SNAPSHOT";
}
/**
* Loads the native library.
*
* @param libraryJarClass any class within the library-containing jar
* @param libName name of library
* @return whether or not successful
*/
public static boolean loadNativeLibrary(Class libraryJarClass, String libName) {
boolean success = false;
if (Architecture.UNKNOWN == getArchitecture()) {
IJ.log("No native library available for this platform.");
}
else {
try
{
// will extract library to temporary directory
String tmpDirectory = System.getProperty(JAVA_TMPDIR);
JniExtractor jniExtractor =
new DefaultJniExtractor(libraryJarClass, tmpDirectory);
// do extraction
File extractedFile = jniExtractor.extractJni
(getPlatformLibraryPath(), getVersionedLibraryName(libName));
// load extracted library from temporary directory
System.load(extractedFile.getPath());
success = true;
}
catch (IOException e)
{
IJ.log("IOException creating DefaultJniExtractor " + e.getMessage());
}
catch (SecurityException e)
{
IJ.log("Can't load dynamic library " + e.getMessage());
}
catch (UnsatisfiedLinkError e)
{
IJ.log("Libary does not exists " + e.getMessage());
}
}
return success;
}
/**
* Loads the native library specified by the libname argument.
* Can be used in place of System.loadLibrary().
* Extracts the library from the jar's resources to the library path before loading.
*/
public static void loadLibrary(Class libraryJarClass, String libname) {
extractNativeLibraryToPath(libraryJarClass, libname);
System.loadLibrary(libname);
}
/**
* Extracts the native library specified by the libname argument from the
* resources of the jar file that contains the given libraryJarClass class.
* Puts it on the library path.
*
* @param libraryJarClass any class within the library-containing jar
* @param libname name of the library to extract
* @return whether or not successful
*/
public static boolean extractNativeLibraryToPath(Class libraryJarClass, String libname) {
boolean success = false;
try {
// get a temporary directory
boolean userSuppliedDirectory = true;
String directory = System.getProperty(USER_TMPDIR);
if (null == directory) {
userSuppliedDirectory = false;
directory = System.getProperty(JAVA_TMPDIR);
}
// if we should try the hack
if (!s_skipHack) {
// is it necessary? already on path?
if (!isOnLibraryPath(directory)) {
// try the hack
if (!addToLibraryPath(directory)) {
// fails, don't try again
s_skipHack = true;
}
}
}
// if hack doesn't work
if (s_skipHack) {
// go with user supplied directory
if (!userSuppliedDirectory) {
// otherwise, find a directory on the path to extract to
directory = findWritableDirectoryOnPath();
}
}
// extract library to directory
if (null != directory) {
try {
JniExtractor jniExtractor =
new DefaultJniExtractor(libraryJarClass, directory);
File extractedFile = jniExtractor.extractJni("", libname); //TODO pass in library path or get rid of this method
success = true;
}
catch (IOException e) {
System.out.println("IOException creating DefaultJniExtractor " + e.getMessage());
}
}
}
catch (SecurityException e) {
// a security manager exists and its checkPropertyAccess method
// doesn't allow access to the specified system property.
}
return success;
}
/**
* Is the given directory on java.library.path?
*
* @param directory
* @return whether or not on path
*/
public static boolean isOnLibraryPath(String directory) {
return checkLibraryPath(JAVA_PATH, directory)
|| checkLibraryPath(SUN_PATH, directory);
}
/**
* Helper routine, checks path for a given property name.
*
* @param propertyName
* @param directory
* @return whether or not on path
*/
private static boolean checkLibraryPath(String propertyName, String directory) {
String paths[] = getPaths(propertyName);
for (String path : paths) {
System.out.println(path);
if (directory.equals(path)) {
return true;
}
}
return false;
}
/**
* Helper routine, gets list of paths for a given property name.
*
* @param propertyName
* @return list of paths
*/
private static String[] getPaths(String propertyName) {
String paths[] = null;
try {
paths = System.getProperty(propertyName).split(File.pathSeparator);
}
catch (SecurityException e) {
// unable to get list of paths
paths = new String[0];
}
return paths;
}
/**
* Adds a given folder to the java.library.path.
*
* From {@link http://nicklothian.com/blog/2008/11/19/modify-javalibrarypath-at-runtime/}
*
* "This enables the java.library.path to be modified at runtime. From a
* Sun engineer at http://forums.sun.com/thread.jspa?threadID=707176" (link
* is dead)
*
* See also {@link http://forums.java.net/node/703790}
*
* "So here's what I found. I decompiled the RV library, and used that to
* step into the Java classloader code. There I found two variables being
* used to look for native libraries: sys_paths and usr_paths. Usr_paths
* *appears* to be loaded from the environment variable 'java.library.path'
* and sys_paths *appears* to be loaded from the environment variable
* 'sun.boot.library.path'."
*
* See also {@link http://safcp.googlecode.com/svn/trunk/SAFCP/src/main/java/ufrj/safcp/util/JavaLibraryPath.java}
*
* Uses similar approach, GPL3 license: "Will not work if JVM security
* policy gets in the way (like in an applet). Will not work if Sun changes
* the private members. This really shouldn't be used at all."
*
* @param directory folder to add, should be absolute path
* @return whether successful
*/
public static boolean addToLibraryPath(String directory) {
boolean success = false;
try {
// get user paths
Field field = ClassLoader.class.getDeclaredField(USER_PATHS);
field.setAccessible(true);
String[] paths = (String[])field.get(null);
// already in paths?
for (int i = 0; i < paths.length; i++) {
if (directory.equals(paths[i])) {
return true;
}
}
// add to user paths
String[] tmp = new String[paths.length+1];
System.arraycopy(paths,0,tmp,0,paths.length);
tmp[paths.length] = directory;
field.set(null,tmp);
System.setProperty(JAVA_PATH,
System.getProperty(JAVA_PATH) + File.pathSeparator + directory); //TODO why bother?
success = true;
}
catch (IllegalAccessException e) {
// Failed to get permissions to set library path
}
catch (NoSuchFieldException e) {
// Failed to get field handle to set library path
}
catch (Exception e) {
// play it safe
}
return success;
}
public static String findWritableDirectoryOnPath() {
// if we haven't found this already
if (null == s_writableDirectory) {
// try the current directory first
if (isOnLibraryPath(CURRENT_DIRECTORY)) {
// is on path, is it writable?
if (isWritableDirectory(CURRENT_DIRECTORY)) {
// yes, use it
s_writableDirectory = CURRENT_DIRECTORY;
}
}
// still looking?
if (null == s_writableDirectory) {
// look on java library path
s_writableDirectory = findWritableDirectory(JAVA_PATH);
// still looking?
if (null == s_writableDirectory) {
// look on Sun library path
s_writableDirectory = findWritableDirectory(SUN_PATH);
}
}
}
return s_writableDirectory;
}
/**
* Helper routine, checks path for a given property name.
*
* @param propertyName
* @param directory
* @return whether or not on path
*/
private static String findWritableDirectory(String propertyName) {
String paths[] = getPaths(propertyName);
for (String path : paths) {
if (isWritableDirectory(path)) {
return path;
}
}
return null;
}
/**
* Do we have write access to the given directory?
*
* @param directory
* @return whether or not writable
*/
public static boolean isWritableDirectory(String directory) {
boolean success = false;
try {
File tempFile = File.createTempFile("dummy", null, new File(directory));
tempFile.deleteOnExit();
success = true;
}
catch (IOException e) {
// file could not be created
}
catch (SecurityException e) {
// security manager exists and checkWrite method does not allow a file to be created
}
return success;
}
}
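/*
Hedged usage sketch (added for illustration, not part of the original file):
a client jar that bundles its natives under META-INF/lib/<arch>/ alongside a
class such as MyPluginLib could load them once at class-initialization time.
MyPluginLib and the library name "myplugin" are assumptions for the example;
loadNativeLibrary itself is the method defined above.

public final class MyPluginLib {
    static {
        // Extracts the platform-specific binary to java.io.tmpdir and System.load()s it.
        if (!NativeLibraryUtil.loadNativeLibrary(MyPluginLib.class, "myplugin")) {
            // Failure is already logged by loadNativeLibrary; fall back or fail fast here.
            throw new UnsatisfiedLinkError("Could not load native library 'myplugin'");
        }
    }
    private MyPluginLib() {
    }
}
*/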
| extra/native-library-util/src/main/java/imagej/nativelibrary/NativeLibraryUtil.java | //
// NativeLibraryUtil.java
//
/*
ImageJ software for multidimensional image processing and analysis.
Copyright (c) 2010, ImageJDev.org.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names of the ImageJDev.org developers nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package imagej.nativelibrary;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import ij.IJ;
import loci.wapmx.nativeutils.jniloader.DefaultJniExtractor;
import loci.wapmx.nativeutils.jniloader.JniExtractor;
/**
*
* @author Aivar Grislis
*/
public class NativeLibraryUtil {
public static enum Architecture
{ UNKNOWN, LINUX_32, LINUX_64, WINDOWS_32, WINDOWS_64, OSX_32, OSX_64, };
private static Architecture s_architecture = Architecture.UNKNOWN;
private static final String DELIM = "/";
private static final String USER_TMPDIR = "java.library.tmpdir";
private static final String JAVA_TMPDIR = "java.io.tmpdir";
private static final String JAVA_PATH = "java.library.path";
private static final String SUN_PATH = "sun.boot.library.path";
private static final String USER_PATHS = "usr_paths";
private static final String CURRENT_DIRECTORY = ".";
private static boolean s_skipHack = false;
private static String s_writableDirectory = null;
public static String getOsArchString() {
String returnValue = null;
String name = System.getProperty("os.name").toLowerCase();
String arch = System.getProperty("os.arch").toLowerCase();
if (name.indexOf("mac") > 0) {
returnValue = "osx";
}
else if (name.indexOf("win") > 0) {
returnValue = "windows";
}
else if (name.indexOf("nix") > 0 || name.indexOf("nux") > 0) {
returnValue = "linux";
}
if (null != returnValue) {
// Note that this is actually the architecture of the installed JVM.
if (arch.indexOf("86") > 0 || arch.indexOf("amd") > 0) {
int bits = 32;
if (arch.indexOf("64") > 0) {
bits = 64;
}
returnValue += Integer.toString(bits);
}
else {
returnValue = null;
}
}
return returnValue;
}
/**
* Determines the underlying hardware platform and architecture.
*
* @return enumerated architecture value
*/
public static Architecture getArchitecture() {
if (Architecture.UNKNOWN == s_architecture) {
int bits = 0;
// Note that this is actually the architecture of the installed JVM.
String arch = System.getProperty("os.arch").toLowerCase();
if (arch.indexOf("86") > 0 || arch.indexOf("amd") > 0) {
bits = 32;
if (arch.indexOf("64") > 0) {
bits = 64;
}
}
if (bits > 0) {
String name = System.getProperty("os.name").toLowerCase();
if (name.indexOf("nix") >= 0 || name.indexOf("nux") > 0) {
s_architecture = (32 == bits)
? Architecture.LINUX_32
: Architecture.LINUX_64;
}
else if (name.indexOf("win") >= 0) {
s_architecture = (32 == bits)
? Architecture.WINDOWS_32
: Architecture.WINDOWS_64;
}
else if (name.indexOf("mac") >= 0) {
s_architecture = (32 == bits)
? Architecture.OSX_32
: Architecture.OSX_64;
}
}
}
return s_architecture;
}
/**
* Returns the path to the native library.
*
* @return path
*/
public static String getPlatformLibraryPath() {
String path = "META-INF" + DELIM + "lib" + DELIM;
switch (getArchitecture()) {
case LINUX_32:
case LINUX_64:
path += "i386-Linux-g++";
break;
case WINDOWS_32:
case WINDOWS_64:
path += "x86-Windows-msvc";
break;
case OSX_32:
path += "x86_32-MacOSX-gpp";
break;
case OSX_64:
path += "x86_64-MacOSX-gpp";
break;
}
return path + DELIM;
}
/**
* Returns the full file name (without path) of the native library.
*
* @param libName
* @return file name
*/
public static String getPlatformLibraryName(String libName) {
libName = getVersionedLibraryName(libName);
String name = null;
switch (getArchitecture()) {
case LINUX_32:
case LINUX_64:
name = libName + ".so";
break;
case WINDOWS_32:
case WINDOWS_64:
name = libName + ".dll";
break;
case OSX_32:
case OSX_64:
name = "lib" + libName + ".dylib";
break;
}
return name;
}
/**
* Returns the Maven-versioned file name of the native library.
*
* Note: With the Nar Plugin the class NarSystem.java is built for the
* client of this native library and takes care of this versioning
* hardcoding. If the client is "-1.0-SNAPSHOT" so should the native
* library be the same version.
*
* @param libName
* @return
*/
//TODO This shouldn't be hardcoded in the general-purpose utility class.
//TODO Couldn't we just get rid of this version label altogether?
public static String getVersionedLibraryName(String libName) {
return libName + "-1.0-SNAPSHOT";
}
/**
* Loads the native library.
*
* @param libraryJarClass any class within the library-containing jar
* @param libName name of library
* @return whether or not successful
*/
public static boolean loadNativeLibrary(Class libraryJarClass, String libName) {
boolean success = false;
if (Architecture.UNKNOWN == getArchitecture()) {
IJ.log("No native library available for this platform.");
}
else {
try
{
// will extract library to temporary directory
String tmpDirectory = System.getProperty(JAVA_TMPDIR);
JniExtractor jniExtractor =
new DefaultJniExtractor(libraryJarClass, tmpDirectory);
// do extraction
File extractedFile = jniExtractor.extractJni
(getPlatformLibraryPath(), getVersionedLibraryName(libName));
// load extracted library from temporary directory
System.load(extractedFile.getPath());
success = true;
}
catch (IOException e)
{
IJ.log("IOException creating DefaultJniExtractor " + e.getMessage());
}
catch (SecurityException e)
{
IJ.log("Can't load dynamic library " + e.getMessage());
}
catch (UnsatisfiedLinkError e)
{
IJ.log("Libary does not exists " + e.getMessage());
}
}
return success;
}
/**
* Loads the native library specified by the libname argument.
* Can be used in place of System.loadLibrary().
* Extracts
*/
public static void loadLibrary(Class libraryJarClass, String libname) {
extractNativeLibraryToPath(libraryJarClass, libname);
System.loadLibrary(libname);
}
/**
* Extracts the native library specified by the libname argument from the
* resources of the jar file that contains the given libraryJarClass class.
* Puts it on the library path.
*
* @param libraryJarClass
* @param libname
* @return
*/
public static boolean extractNativeLibraryToPath(Class libraryJarClass, String libname) {
boolean success = false;
try {
// get a temporary directory
boolean userSuppliedDirectory = true;
String directory = System.getProperty(USER_TMPDIR);
if (null == directory) {
userSuppliedDirectory = false;
directory = System.getProperty(JAVA_TMPDIR);
}
// if we should try the hack
if (!s_skipHack) {
// is it necessary? already on path?
if (!isOnLibraryPath(directory)) {
// try the hack
if (!addToLibraryPath(directory)) {
// fails, don't try again
s_skipHack = true;
}
}
}
// if hack doesn't work
if (s_skipHack) {
// go with user supplied directory
if (!userSuppliedDirectory) {
// otherwise, find a directory on the path to extract to
directory = findWritableDirectoryOnPath();
}
}
// extract library to directory
if (null != directory) {
try {
JniExtractor jniExtractor =
new DefaultJniExtractor(libraryJarClass, directory);
File extractedFile = jniExtractor.extractJni("", libname); //TODO pass in library path or get rid of this method
success = true;
}
catch (IOException e) {
System.out.println("IOException creating DefaultJniExtractor " + e.getMessage());
}
}
}
catch (SecurityException e) {
// a security manager exists and its checkPropertyAccess method
// doesn't allow access to the specified system property.
}
return success;
}
/**
* Is the given directory on java.library.path?
*
* @param directory
* @return whether or not on path
*/
public static boolean isOnLibraryPath(String directory) {
return checkLibraryPath(JAVA_PATH, directory)
|| checkLibraryPath(SUN_PATH, directory);
}
/**
* Helper routine, checks path for a given property name.
*
* @param propertyName
* @param directory
* @return whether or not on path
*/
private static boolean checkLibraryPath(String propertyName, String directory) {
String paths[] = getPaths(propertyName);
for (String path : paths) {
System.out.println(path);
if (directory.equals(path)) {
return true;
}
}
return false;
}
/**
* Helper routine, gets list of paths for a given property name.
*
* @param propertyName
* @return list of paths
*/
private static String[] getPaths(String propertyName) {
String paths[] = null;
try {
paths = System.getProperty(propertyName).split(File.pathSeparator);
}
catch (SecurityException e) {
// unable to get list of paths
paths = new String[0];
}
return paths;
}
/**
* Adds a given folder to the java.library.path.
*
* From {@link http://nicklothian.com/blog/2008/11/19/modify-javalibrarypath-at-runtime/}
*
* "This enables the java.library.path to be modified at runtime. From a
* Sun engineer at http://forums.sun.com/thread.jspa?threadID=707176" (link
* is dead)
*
* See also {@link http://forums.java.net/node/703790}
*
* "So here's what I found. I decompiled the RV library, and used that to
* step into the Java classloader code. There I found two variables being
* used to look for native libraries: sys_paths and usr_paths. Usr_paths
* *appears* to be loaded from the environment variable 'java.library.path'
* and sys_paths *appears* to be loaded from the environment variable
* 'sun.boot.library.path'."
*
* See also {@link http://safcp.googlecode.com/svn/trunk/SAFCP/src/main/java/ufrj/safcp/util/JavaLibraryPath.java}
*
* Uses similar approach, GPL3 license: "Will not work if JVM security
* policy gets in the way (like in an applet). Will not work if Sun changes
* the private members. This really shouldn't be used at all."
*
* @param directory folder to add, should be absolute path
* @return whether successful
*/
public static boolean addToLibraryPath(String directory) {
boolean success = false;
try {
// get user paths
Field field = ClassLoader.class.getDeclaredField(USER_PATHS);
field.setAccessible(true);
String[] paths = (String[])field.get(null);
// already in paths?
for (int i = 0; i < paths.length; i++) {
if (directory.equals(paths[i])) {
return true;
}
}
// add to user paths
String[] tmp = new String[paths.length+1];
System.arraycopy(paths,0,tmp,0,paths.length);
tmp[paths.length] = directory;
field.set(null,tmp);
System.setProperty(JAVA_PATH,
System.getProperty(JAVA_PATH) + File.pathSeparator + directory); //TODO why bother?
success = true;
}
catch (IllegalAccessException e) {
// Failed to get permissions to set library path
}
catch (NoSuchFieldException e) {
// Failed to get field handle to set library path
}
catch (Exception e) {
// play it safe
}
return success;
}
public static String findWritableDirectoryOnPath() {
// if we haven't found this already
if (null == s_writableDirectory) {
// try the current directory first
if (isOnLibraryPath(CURRENT_DIRECTORY)) {
// is on path, is it writable?
if (isWritableDirectory(CURRENT_DIRECTORY)) {
// yes, use it
s_writableDirectory = CURRENT_DIRECTORY;
}
}
// still looking?
if (null == s_writableDirectory) {
// look on java library path
s_writableDirectory = findWritableDirectory(JAVA_PATH);
// still looking?
if (null == s_writableDirectory) {
// look on Sun library path
s_writableDirectory = findWritableDirectory(SUN_PATH);
}
}
}
return s_writableDirectory;
}
/**
* Helper routine, checks path for a given property name.
*
* @param propertyName
* @param directory
* @return whether or not on path
*/
private static String findWritableDirectory(String propertyName) {
String paths[] = getPaths(propertyName);
for (String path : paths) {
if (isWritableDirectory(path)) {
return path;
}
}
return null;
}
/**
* Do we have write access to the given directory?
*
* @param directory
* @return whether or not writable
*/
public static boolean isWritableDirectory(String directory) {
boolean success = false;
try {
File tempFile = File.createTempFile("dummy", null, new File(directory));
tempFile.deleteOnExit();
success = true;
}
catch (IOException e) {
// file could not be created
}
catch (SecurityException e) {
// security manager exists and checkWrite method does not allow a file to be created
}
return success;
}
}
| Got bitten by the "source string".indexOf("thing you're looking for") > 0 again. S/b >= 0.
This used to be revision r2679.
| extra/native-library-util/src/main/java/imagej/nativelibrary/NativeLibraryUtil.java | Got bitten by the "source string".indexOf("thing you're looking for") > 0 again. S/b >= 0. | <ide><path>xtra/native-library-util/src/main/java/imagej/nativelibrary/NativeLibraryUtil.java
<ide> */
<ide> public class NativeLibraryUtil {
<ide> public static enum Architecture
<del> { UNKNOWN, LINUX_32, LINUX_64, WINDOWS_32, WINDOWS_64, OSX_32, OSX_64, };
<add> { UNKNOWN, LINUX_32, LINUX_64, WINDOWS_32, WINDOWS_64, OSX_32, OSX_64, OSX_PPC };
<add> private static enum Processor
<add> { UNKNOWN, INTEL_32, INTEL_64, PPC };
<ide> private static Architecture s_architecture = Architecture.UNKNOWN;
<ide> private static final String DELIM = "/";
<ide> private static final String USER_TMPDIR = "java.library.tmpdir";
<ide> private static boolean s_skipHack = false;
<ide> private static String s_writableDirectory = null;
<ide>
<del> public static String getOsArchString() {
<del> String returnValue = null;
<del> String name = System.getProperty("os.name").toLowerCase();
<del> String arch = System.getProperty("os.arch").toLowerCase();
<del> if (name.indexOf("mac") > 0) {
<del> returnValue = "osx";
<del> }
<del> else if (name.indexOf("win") > 0) {
<del> returnValue = "windows";
<del>
<del> }
<del> else if (name.indexOf("nix") > 0 || name.indexOf("nux") > 0) {
<del> returnValue = "linux";
<del> }
<del> if (null != returnValue) {
<del> // Note that this is actually the architecture of the installed JVM.
<del> if (arch.indexOf("86") > 0 || arch.indexOf("amd") > 0) {
<del> int bits = 32;
<del> if (arch.indexOf("64") > 0) {
<del> bits = 64;
<del> }
<del> returnValue += Integer.toString(bits);
<del> }
<del> else {
<del> returnValue = null;
<del> }
<del> }
<del> return returnValue;
<del> }
<del>
<ide> /**
<ide> * Determines the underlying hardward platform and architecture.
<ide> *
<ide> */
<ide> public static Architecture getArchitecture() {
<ide> if (Architecture.UNKNOWN == s_architecture) {
<del> int bits = 0;
<del> // Note that this is actually the architecture of the installed JVM.
<del> String arch = System.getProperty("os.arch").toLowerCase();
<del> if (arch.indexOf("86") > 0 || arch.indexOf("amd") > 0) {
<del> bits = 32;
<del> if (arch.indexOf("64") > 0) {
<del> bits = 64;
<del> }
<del> }
<del> if (bits > 0) {
<add> Processor processor = getProcessor();
<add> if (Processor.UNKNOWN != processor) {
<ide> String name = System.getProperty("os.name").toLowerCase();
<del> if (name.indexOf("nix") >= 0 || name.indexOf("nux") > 0) {
<del> s_architecture = (32 == bits)
<del> ? Architecture.LINUX_32
<del> : Architecture.LINUX_64;
<add> if (name.indexOf("nix") >= 0 || name.indexOf("nux") >= 0) {
<add> if (Processor.INTEL_32 == processor) {
<add> s_architecture = Architecture.LINUX_32;
<add> }
<add> else if (Processor.INTEL_64 == processor) {
<add> s_architecture = Architecture.LINUX_64;
<add> }
<ide> }
<ide> else if (name.indexOf("win") >= 0) {
<del> s_architecture = (32 == bits)
<del> ? Architecture.WINDOWS_32
<del> : Architecture.WINDOWS_64;
<add> if (Processor.INTEL_32 == processor) {
<add> s_architecture = Architecture.WINDOWS_32;
<add> }
<add> else if (Processor.INTEL_64 == processor) {
<add> s_architecture = Architecture.WINDOWS_64;
<add> }
<ide> }
<ide> else if (name.indexOf("mac") >= 0) {
<del> s_architecture = (32 == bits)
<del> ? Architecture.OSX_32
<del> : Architecture.OSX_64;
<add> if (Processor.INTEL_32 == processor) {
<add> s_architecture = Architecture.OSX_32;
<add> }
<add> else if (Processor.INTEL_64 == processor) {
<add> s_architecture = Architecture.OSX_64;
<add> }
<add> else if (Processor.PPC == processor) {
<add> s_architecture = Architecture.OSX_PPC;
<add> }
<ide> }
<ide> }
<ide> }
<ide> return s_architecture;
<add> }
<add>
<add> /**
<add> * Determines what processor is in use.
<add> *
<add> * @return
<add> */
<add> private static Processor getProcessor() {
<add> Processor processor = Processor.UNKNOWN;
<add> int bits;
<add>
<add> // Note that this is actually the architecture of the installed JVM.
<add> String arch = System.getProperty("os.arch").toLowerCase();
<add>
<add> if (arch.indexOf("ppc") >= 0) {
<add> processor = Processor.PPC;
<add> }
<add> else if (arch.indexOf("86") >= 0 || arch.indexOf("amd") >= 0) {
<add> bits = 32;
<add> if (arch.indexOf("64") >= 0) {
<add> bits = 64;
<add> }
<add> processor = (32 == bits) ? Processor.INTEL_32 : Processor.INTEL_64;
<add> }
<add> return processor;
<ide> }
<ide>
<ide> /**
<ide> String path = "META-INF" + DELIM + "lib" + DELIM;
<ide> switch (getArchitecture()) {
<ide> case LINUX_32:
<add> path += "i386-Linux-g++";
<add> break;
<ide> case LINUX_64:
<del> path += "i386-Linux-g++";
<add> path += "x86_64-Linux-g++";
<ide> break;
<ide> case WINDOWS_32:
<ide> case WINDOWS_64:
<ide> path += "x86-Windows-msvc";
<ide> break;
<ide> case OSX_32:
<del> path += "x86_32-MacOSX-gpp";
<add> path += "i386-MacOSX-gpp";
<ide> break;
<ide> case OSX_64:
<ide> path += "x86_64-MacOSX-gpp";
<add> break;
<add> case OSX_PPC:
<add> path += "ppc-MacOSX-gpp";
<ide> break;
<ide> }
<ide> return path + DELIM; |
|
Java | apache-2.0 | 40b3a52f92b2515fe72df72f0230a4290081fa0e | 0 | millmanorama/autopsy,wschaeferB/autopsy,APriestman/autopsy,esaunders/autopsy,APriestman/autopsy,rcordovano/autopsy,APriestman/autopsy,narfindustries/autopsy,wschaeferB/autopsy,narfindustries/autopsy,narfindustries/autopsy,rcordovano/autopsy,dgrove727/autopsy,esaunders/autopsy,esaunders/autopsy,millmanorama/autopsy,millmanorama/autopsy,APriestman/autopsy,APriestman/autopsy,dgrove727/autopsy,wschaeferB/autopsy,rcordovano/autopsy,APriestman/autopsy,esaunders/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy,rcordovano/autopsy,wschaeferB/autopsy,millmanorama/autopsy,APriestman/autopsy,rcordovano/autopsy,dgrove727/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.swing.SwingWorker;
import org.apache.commons.lang.StringUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.aggregate.ProgressContributor;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
/**
* Stores the results from running a Solr query (which could contain multiple
* keywords).
*
*/
class QueryResults {
private static final Logger logger = Logger.getLogger(QueryResults.class.getName());
private static final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
/**
* The query that generated the results.
*/
private final KeywordSearchQuery keywordSearchQuery;
/**
* A map of keywords to keyword hits.
*/
private final Map<Keyword, List<KeywordHit>> results = new HashMap<>();
/**
* The list of keywords
*/
// TODO: This is redundant. The keyword list is in the query.
private final KeywordList keywordList;
QueryResults(KeywordSearchQuery query, KeywordList keywordList) {
this.keywordSearchQuery = query;
this.keywordList = keywordList;
}
void addResult(Keyword keyword, List<KeywordHit> hits) {
results.put(keyword, hits);
}
// TODO: This is redundant. The keyword list is in the query.
KeywordList getKeywordList() {
return keywordList;
}
KeywordSearchQuery getQuery() {
return keywordSearchQuery;
}
List<KeywordHit> getResults(Keyword keyword) {
return results.get(keyword);
}
Set<Keyword> getKeywords() {
return results.keySet();
}
/**
* Writes the keyword hits encapsulated in this query result to the
* blackboard. Makes one artifact per keyword per searched object (file or
* artifact), i.e., if a keyword is found several times in the object, only
* one artifact is created.
*
* @param progress Can be null.
* @param subProgress Can be null.
* @param worker The Swing worker that is writing the hits, needed to
* support cancellation.
* @param notifyInbox Whether or not to write a message to the ingest messages
* inbox.
*
* @return The artifacts that were created.
*/
Collection<BlackboardArtifact> writeAllHitsToBlackBoard(ProgressHandle progress, ProgressContributor subProgress, SwingWorker<Object, Void> worker, boolean notifyInbox) {
final Collection<BlackboardArtifact> newArtifacts = new ArrayList<>();
if (progress != null) {
progress.start(getKeywords().size());
}
int unitProgress = 0;
for (final Keyword keyword : getKeywords()) {
if (worker.isCancelled()) {
logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS
break;
}
// Update progress object(s), if any
if (progress != null) {
progress.progress(keyword.toString(), unitProgress);
}
if (subProgress != null) {
String hitDisplayStr = keyword.getSearchTerm();
if (hitDisplayStr.length() > 50) {
hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
}
subProgress.progress(keywordList.getName() + ": " + hitDisplayStr, unitProgress);
}
for (KeywordHit hit : getOneHitPerObject(keyword)) {
String termString = keyword.getSearchTerm();
String snippet = hit.getSnippet();
if (StringUtils.isBlank(snippet)) {
final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(termString);
try {
//this doesn't work for regex queries...
snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !keywordSearchQuery.isLiteral(), true);
} catch (NoOpenCoreException e) {
logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
//no reason to continue
break;
} catch (Exception e) {
logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
continue;
}
}
KeywordCachedArtifact writeResult = keywordSearchQuery.writeSingleFileHitsToBlackBoard(keyword, hit, snippet, keywordList.getName());
if (writeResult != null) {
newArtifacts.add(writeResult.getArtifact());
if (notifyInbox) {
writeSingleFileInboxMessage(writeResult, hit.getContent());
}
} else {
logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
}
}
++unitProgress;
}
// Update artifact browser
if (!newArtifacts.isEmpty()) {
newArtifacts.stream()
//group artifacts by type
.collect(Collectors.groupingBy(BlackboardArtifact::getArtifactTypeID))
//for each type send an event
.forEach((typeID, artifacts)
-> IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.fromID(typeID), artifacts)));
}
return newArtifacts;
}
/**
* Gets the first hit of the keyword for each searched object.
*
* @param keyword the keyword whose hits should be reduced to one per object
*
* @return Collection<KeywordHit> containing KeywordHits with lowest
* SolrObjectID-ChunkID pairs.
*/
private Collection<KeywordHit> getOneHitPerObject(Keyword keyword) {
HashMap<Long, KeywordHit> hits = new HashMap<>();
// create a list of KeywordHits. The KeywordHit with the lowest chunkID is added to the list.
for (KeywordHit hit : getResults(keyword)) {
if (!hits.containsKey(hit.getSolrObjectId())) {
hits.put(hit.getSolrObjectId(), hit);
} else if (hit.getChunkId() < hits.get(hit.getSolrObjectId()).getChunkId()) {
hits.put(hit.getSolrObjectId(), hit);
}
}
return hits.values();
}
/**
* Generates an ingest inbox message for a given keyword hit in a given file.
*
* @param written the cached artifact written for the keyword hit
* @param hitContent the content in which the hit was found
*/
private void writeSingleFileInboxMessage(KeywordCachedArtifact written, Content hitContent) {
StringBuilder subjectSb = new StringBuilder();
StringBuilder detailsSb = new StringBuilder();
if (!keywordSearchQuery.isLiteral()) {
subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
} else {
subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
}
String uniqueKey = null;
BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
if (attr != null) {
final String keyword = attr.getValueString();
subjectSb.append(keyword);
uniqueKey = keyword.toLowerCase();
}
//details
detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
//hit
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl"));
detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
//preview
attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
if (attr != null) {
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
}
//file
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
if (hitContent instanceof AbstractFile) {
AbstractFile hitFile = (AbstractFile) hitContent;
detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>"); //NON-NLS
} else {
detailsSb.append("<td>").append(hitContent.getName()).append("</td>"); //NON-NLS
}
detailsSb.append("</tr>"); //NON-NLS
//list
attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
if (attr != null) {
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
}
//regex
if (!keywordSearchQuery.isLiteral()) {
attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
if (attr != null) {
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
}
}
detailsSb.append("</table>"); //NON-NLS
IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
}
}
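/*
Hedged usage sketch (added for illustration, not part of the original file):
callers in this package populate a QueryResults from a finished Solr query and
then post the hits to the blackboard. The query, keywordList, hitsByKeyword
map, progressHandle and swingWorker objects are assumed to come from the
surrounding keyword-search code.

QueryResults queryResults = new QueryResults(query, keywordList);
for (Map.Entry<Keyword, List<KeywordHit>> entry : hitsByKeyword.entrySet()) {
    queryResults.addResult(entry.getKey(), entry.getValue());
}
// The progress contributor may be null; the final flag asks for inbox notifications.
Collection<BlackboardArtifact> artifacts =
        queryResults.writeAllHitsToBlackBoard(progressHandle, null, swingWorker, true);
*/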
| KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java | /*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.swing.SwingWorker;
import org.apache.commons.lang.StringUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.aggregate.ProgressContributor;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
/**
* Stores the results from running a Solr query (which could contain multiple
* keywords).
*
*/
class QueryResults {
private static final Logger logger = Logger.getLogger(QueryResults.class.getName());
private static final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
/**
* The query that generated the results.
*/
private final KeywordSearchQuery keywordSearchQuery;
/**
* A map of keywords to keyword hits.
*/
private final Map<Keyword, List<KeywordHit>> results = new HashMap<>();
/**
* The list of keywords
*/
// TODO: This is redundant. The keyword list is in the query.
private final KeywordList keywordList;
QueryResults(KeywordSearchQuery query, KeywordList keywordList) {
this.keywordSearchQuery = query;
this.keywordList = keywordList;
}
void addResult(Keyword keyword, List<KeywordHit> hits) {
results.put(keyword, hits);
}
// TODO: This is redundant. The keyword list is in the query.
KeywordList getKeywordList() {
return keywordList;
}
KeywordSearchQuery getQuery() {
return keywordSearchQuery;
}
List<KeywordHit> getResults(Keyword keyword) {
return results.get(keyword);
}
Set<Keyword> getKeywords() {
return results.keySet();
}
/**
* Writes the keyword hits encapsulated in this query result to the
* blackboard. Makes one artifact per keyword per searched object (file or
* artifact), i.e., if a keyword is found several times in the object, only
* one artifact is created.
*
* @param progress Can be null.
* @param subProgress Can be null.
* @param worker The Swing worker that is writing the hits, needed to
* support cancellation.
* @param notifyInbox Whether or not to write a message to the ingest messages
* inbox.
*
* @return The artifacts that were created.
*/
Collection<BlackboardArtifact> writeAllHitsToBlackBoard(ProgressHandle progress, ProgressContributor subProgress, SwingWorker<Object, Void> worker, boolean notifyInbox) {
final Collection<BlackboardArtifact> newArtifacts = new ArrayList<>();
if (progress != null) {
progress.start(getKeywords().size());
}
int unitProgress = 0;
for (final Keyword keyword : getKeywords()) {
if (worker.isCancelled()) {
logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS
break;
}
// Update progress object(s), if any
if (progress != null) {
progress.progress(keyword.toString(), unitProgress);
}
if (subProgress != null) {
String hitDisplayStr = keyword.getSearchTerm();
if (hitDisplayStr.length() > 50) {
hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
}
subProgress.progress(keywordList.getName() + ": " + hitDisplayStr, unitProgress);
}
for (KeywordHit hit : getOneHitPerObject(keyword)) {
String termString = keyword.getSearchTerm();
String snippet = hit.getSnippet();
if (StringUtils.isBlank(snippet)) {
final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(termString);
try {
//this doesn't work for regex queries...
snippet = LuceneQuery.querySnippet(snippetQuery, hit.getSolrObjectId(), hit.getChunkId(), !keywordSearchQuery.isLiteral(), true);
} catch (NoOpenCoreException e) {
logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
//no reason to continue
break;
} catch (Exception e) {
logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); //NON-NLS
continue;
}
}
if (snippet != null) {
KeywordCachedArtifact writeResult = keywordSearchQuery.writeSingleFileHitsToBlackBoard(keyword, hit, snippet, keywordList.getName());
if (writeResult != null) {
newArtifacts.add(writeResult.getArtifact());
if (notifyInbox) {
writeSingleFileInboxMessage(writeResult, hit.getContent());
}
} else {
logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
}
}
}
++unitProgress;
}
// Update artifact browser
if (!newArtifacts.isEmpty()) {
newArtifacts.stream()
//group artifacts by type
.collect(Collectors.groupingBy(BlackboardArtifact::getArtifactTypeID))
//for each type send an event
.forEach((typeID, artifacts)
-> IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.fromID(typeID), artifacts)));
}
return newArtifacts;
}
/**
* Gets the first hit of the keyword.
*
* @param keyword
*
* @return Collection<KeywordHit> containing KeywordHits with lowest
* SolrObjectID-ChunkID pairs.
*/
private Collection<KeywordHit> getOneHitPerObject(Keyword keyword) {
HashMap<Long, KeywordHit> hits = new HashMap<>();
// create a list of KeywordHits. The KeywordHit with the lowest chunkID is added to the list.
for (KeywordHit hit : getResults(keyword)) {
if (!hits.containsKey(hit.getSolrObjectId())) {
hits.put(hit.getSolrObjectId(), hit);
} else if (hit.getChunkId() < hits.get(hit.getSolrObjectId()).getChunkId()) {
hits.put(hit.getSolrObjectId(), hit);
}
}
return hits.values();
}
/**
* Generate an ingest inbox message for given keyword in given file
*
     * @param written    The cached artifact that was written for the hit.
     * @param hitContent The content in which the hit was found.
*/
private void writeSingleFileInboxMessage(KeywordCachedArtifact written, Content hitContent) {
StringBuilder subjectSb = new StringBuilder();
StringBuilder detailsSb = new StringBuilder();
if (!keywordSearchQuery.isLiteral()) {
subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
} else {
subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
}
String uniqueKey = null;
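        // The keyword itself becomes the message subject; its lowercase form is the unique key used to group inbox messages.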
BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
if (attr != null) {
final String keyword = attr.getValueString();
subjectSb.append(keyword);
uniqueKey = keyword.toLowerCase();
}
//details
detailsSb.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
//hit
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl"));
detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
//preview
attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
if (attr != null) {
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
}
//file
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
if (hitContent instanceof AbstractFile) {
AbstractFile hitFile = (AbstractFile) hitContent;
detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>"); //NON-NLS
} else {
detailsSb.append("<td>").append(hitContent.getName()).append("</td>"); //NON-NLS
}
detailsSb.append("</tr>"); //NON-NLS
//list
attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
if (attr != null) {
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
}
//regex
if (!keywordSearchQuery.isLiteral()) {
attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
if (attr != null) {
detailsSb.append("<tr>"); //NON-NLS
detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
detailsSb.append("<td>").append(attr.getValueString()).append("</td>"); //NON-NLS
detailsSb.append("</tr>"); //NON-NLS
}
}
detailsSb.append("</table>"); //NON-NLS
IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(MODULE_NAME, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
}
}
| save hits even if we never get a snippet
| KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java | save hits even if we never get a snippet | <ide><path>eywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java
<ide> continue;
<ide> }
<ide> }
<del> if (snippet != null) {
<del> KeywordCachedArtifact writeResult = keywordSearchQuery.writeSingleFileHitsToBlackBoard(keyword, hit, snippet, keywordList.getName());
<del> if (writeResult != null) {
<del> newArtifacts.add(writeResult.getArtifact());
<del> if (notifyInbox) {
<del> writeSingleFileInboxMessage(writeResult, hit.getContent());
<del> }
<del> } else {
<del> logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
<add> KeywordCachedArtifact writeResult = keywordSearchQuery.writeSingleFileHitsToBlackBoard(keyword, hit, snippet, keywordList.getName());
<add> if (writeResult != null) {
<add> newArtifacts.add(writeResult.getArtifact());
<add> if (notifyInbox) {
<add> writeSingleFileInboxMessage(writeResult, hit.getContent());
<ide> }
<add> } else {
<add> logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hit.getContent(), keyword.toString()}); //NON-NLS
<ide> }
<ide> }
<ide> ++unitProgress; |
|
Java | apache-2.0 | c939dbcaa0cd66d3470e85cb2c223e53577f9cce | 0 | webanno/webanno,webanno/webanno,webanno/webanno,webanno/webanno | /*******************************************************************************
* Copyright 2012
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.tsv;
import static org.apache.commons.io.IOUtils.closeQuietly;
import static org.apache.commons.lang.StringEscapeUtils.unescapeJava;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.apache.commons.lang.StringUtils;
import org.apache.uima.cas.ArrayFS;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.collection.CollectionException;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.util.CasUtil;
import org.apache.uima.jcas.JCas;
import de.tudarmstadt.ukp.clarin.webanno.tsv.util.AnnotationUnit;
import de.tudarmstadt.ukp.dkpro.core.api.io.JCasResourceCollectionReader_ImplBase;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.POS;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData;
import de.tudarmstadt.ukp.dkpro.core.api.parameter.ComponentParameters;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
/**
 * This class reads WebAnno compatible TSV files and creates annotations from
 * the information provided. The header of the file records the existing
 * annotation layers with their feature names.<br>
 * If the annotation type or a feature in the type does not exist in the CAS, it
 * throws an error.<br>
 * Span types start with the prefix <b> #T_SP=</b>. <br>
 * Relation types start with the prefix <b> #T_RL=</b>. <br>
 * Chain types start with the prefix <b> #T_CH=</b>. <br>
 * Slot features start with the prefix <b> ROLE_</b>. <br>
 * All features of a type follow the name, separated by the <b>|</b> character.
* <br>
*/
public class WebannoTsv3Reader extends JCasResourceCollectionReader_ImplBase {
private static final String TAB = "\t";
private static final String LF = "\n";
private static final String REF_REL = "referenceRelation";
private static final String REF_LINK = "referenceType";
private static final String CHAIN = "Chain";
private static final String FIRST = "first";
private static final String NEXT = "next";
public static final String ROLE = "ROLE_";
public static final String BT = "BT_"; // base type for the relation
// annotation
private static final String DEPENDENT = "Dependent";
private static final String GOVERNOR = "Governor";
private String fileName;
private int columns = 2;// token number + token columns (minimum required)
private Map<Type, Set<Feature>> allLayers = new LinkedHashMap<Type, Set<Feature>>();
private Map<Feature, Type> roleLinks = new HashMap<>();
private Map<Feature, Type> roleTargets = new HashMap<>();
private Map<Feature, Type> slotLinkTypes = new HashMap<>();
private StringBuilder coveredText = new StringBuilder();
// for each type, for each unit, annotations per position
private Map<Type, Map<AnnotationUnit, List<String>>> annotationsPerPostion = new LinkedHashMap<>();
private Map<Type, Map<Integer, Map<Integer, AnnotationFS>>> chainAnnosPerTyep = new HashMap<>();
private List<AnnotationUnit> units = new ArrayList<>();
private Map<String, AnnotationUnit> token2Units = new HashMap<>();
private Map<AnnotationUnit, Token> units2Tokens = new HashMap<>();
private Map<Integer, Type> layerMaps = new LinkedHashMap<>();
private Map<Type, Feature> depFeatures = new HashMap<>();
private Map<Type, Type> depTypess = new HashMap<>();
// record the annotation at ref position when it is multiple token
// annotation
private Map<Type, Map<AnnotationUnit, Map<Integer, AnnotationFS>>> annoUnitperAnnoFs = new HashMap<>();
public void convertToCas(JCas aJCas, InputStream aIs, String aEncoding) throws IOException
{
DocumentMetaData documentMetadata = DocumentMetaData.get(aJCas);
fileName = documentMetadata.getDocumentTitle();
// setLayerAndFeature(aJCas, aIs, aEncoding);
setAnnotations(aJCas, aIs, aEncoding);
aJCas.setDocumentText(coveredText.toString());
}
/**
* Iterate through lines and create span annotations accordingly. For
* multiple span annotation, based on the position of the annotation in the
* line, update only the end position of the annotation
*/
private void setAnnotations(JCas aJCas, InputStream aIs, String aEncoding) throws IOException {
// getting header information
LineIterator lineIterator = IOUtils.lineIterator(aIs, aEncoding);
while (lineIterator.hasNext()) {
String line = lineIterator.next().trim();
if (line.startsWith("#T_")) {
setLayerAndFeature(aJCas, line);
continue;
}
if (line.startsWith("#Text=")) {
createSentence(aJCas, line);
continue;
}
if (line.startsWith("#FORMAT=")) {
continue;
}
if (line.trim().isEmpty()) {
continue;
}
int count = StringUtils.countMatches(line, "\t");
if (columns != count) {
throw new IOException(fileName + " This is not a valid TSV File. check this line: " + line);
}
String regex = "(?<!\\\\)" + Pattern.quote(TAB);
String[] lines = line.split(regex);
int begin = Integer.parseInt(lines[1].split("-")[0]);
int end = Integer.parseInt(lines[1].split("-")[1]);
AnnotationUnit unit = createTokens(aJCas, lines, begin, end);
int ind = 3;
setAnnosPerTypePerUnit(lines, unit, ind);
}
Map<Type, Map<AnnotationUnit, List<AnnotationFS>>> annosPerTypePerUnit = new HashMap<>();
setAnnosPerUnit(aJCas, annosPerTypePerUnit);
addAnnotations(aJCas, annosPerTypePerUnit);
addChainAnnotations(aJCas);
}
/**
* The individual link annotations are stored in a {@link TreeMap}
* (chainAnnosPerTye) with chain number and link number references, sorted
* in an ascending order <br>
* Iterate over each chain number and link number references and construct
* the chain
*
* @param aJCas
*/
private void addChainAnnotations(JCas aJCas) {
for (Type linkType : chainAnnosPerTyep.keySet()) {
for (int chainNo : chainAnnosPerTyep.get(linkType).keySet()) {
Type chainType = aJCas.getCas().getTypeSystem()
.getType(linkType.getName().substring(0, linkType.getName().length() - 4) + CHAIN);
Feature firstF = chainType.getFeatureByBaseName(FIRST);
Feature nextF = linkType.getFeatureByBaseName(NEXT);
FeatureStructure chain = aJCas.getCas().createFS(chainType);
aJCas.addFsToIndexes(chain);
AnnotationFS firstFs = chainAnnosPerTyep.get(linkType).get(chainNo).get(1);
AnnotationFS linkFs = firstFs;
chain.setFeatureValue(firstF, firstFs);
for (int i = 2; i <= chainAnnosPerTyep.get(linkType).get(chainNo).size(); i++) {
linkFs.setFeatureValue(nextF, chainAnnosPerTyep.get(linkType).get(chainNo).get(i));
linkFs = chainAnnosPerTyep.get(linkType).get(chainNo).get(i);
}
}
}
}
/**
* Importing span annotations including slot annotations
*
* @param aJCas
* @param aAnnosPerTypePerUnit
*/
private void addAnnotations(JCas aJCas, Map<Type, Map<AnnotationUnit, List<AnnotationFS>>> aAnnosPerTypePerUnit) {
for (Type type : annotationsPerPostion.keySet()) {
Map<Integer, AnnotationFS> multiTokUnits = new HashMap<>();
for (AnnotationUnit unit : annotationsPerPostion.get(type).keySet()) {
int end = unit.end;
List<AnnotationFS> annos = aAnnosPerTypePerUnit.get(type).get(unit);
int j = 0;
Feature linkeF = null;
Map<AnnotationFS, List<FeatureStructure>> linkFSesPerSlotAnno = new HashMap<>();
for (Feature feat : allLayers.get(type)) {
String anno = annotationsPerPostion.get(type).get(unit).get(j);
if (!anno.equals("_")) {
int i = 0;
// if it is a slot annotation (multiple slots per
// single annotation
// (Target1<--role1--Base--role2-->Target2)
int slot = 0;
boolean targetAdd = false;
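                        // Unescaped "||" separates stacked annotations; unescaped "|" separates the values of a single annotation.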
String stackedAnnoRegex = "(?<!\\\\)" + Pattern.quote("||");
for (String mAnnos : anno.split(stackedAnnoRegex)) {
String multipleAnnoRegex = "(?<!\\\\)" + Pattern.quote("|");
for (String mAnno : mAnnos.split(multipleAnnoRegex)) {
int ref = 1;
String depRef = "";
if (mAnno.endsWith("]")) {
depRef = mAnno.substring(mAnno.indexOf("[") + 1, mAnno.length() - 1);
ref = depRef.contains("_") ? 1
: Integer.valueOf(
mAnno.substring(mAnno.indexOf("[") + 1, mAnno.length() - 1));
mAnno = mAnno.substring(0, mAnno.indexOf("["));
}
if (mAnno.startsWith("B-")) {
multiTokUnits.put(ref, annos.get(i));
mAnno = mAnno.substring(2);
}
if (mAnno.startsWith("I-")) {
Feature endF = type.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_END);
multiTokUnits.get(ref).setIntValue(endF, end);
if (feat.getShortName().equals(REF_LINK)) {
// since REF_REL do not start with BIO,
// update it it...
annos.set(i, multiTokUnits.get(ref));
}
setAnnoRefPerUnit(unit, type, ref, multiTokUnits.get(ref));
} else {
if (mAnno.equals(feat.getName())) {
mAnno = null;
}
if (roleLinks.containsKey(feat)) {
linkeF = feat;
FeatureStructure link = aJCas.getCas().createFS(slotLinkTypes.get(feat));
Feature roleFeat = link.getType().getFeatureByBaseName("role");
mAnno = getEscapeChars(mAnno);
link.setStringValue(roleFeat, mAnno);
linkFSesPerSlotAnno.putIfAbsent(annos.get(i), new ArrayList<>());
linkFSesPerSlotAnno.get(annos.get(i)).add(link);
} else if (roleTargets.containsKey(feat)) {
FeatureStructure link = linkFSesPerSlotAnno.get(annos.get(i)).get(slot);
int customTypeNumber = 0;
if(mAnno.split("-").length>2){
customTypeNumber =Integer.valueOf(mAnno.substring(mAnno.lastIndexOf("-")+1));
mAnno = mAnno.substring(0,mAnno.lastIndexOf("-"));
}
AnnotationUnit targetUnit = token2Units.get(mAnno);
Type tType = null;
if (customTypeNumber == 0){
tType = roleTargets.get(feat);
}
else{
tType = layerMaps.get(customTypeNumber);
}
AnnotationFS targetFs = aAnnosPerTypePerUnit.get(tType)
.get(targetUnit).get(ref - 1);
link.setFeatureValue(feat, targetFs);
addSlotAnnotations(linkFSesPerSlotAnno, linkeF);
targetAdd = true;
slot++;
} else if (feat.getShortName().equals(REF_REL)) {
int chainNo = Integer.valueOf(mAnno.split("->")[1].split("-")[0]);
int LinkNo = Integer.valueOf(mAnno.split("->")[1].split("-")[1]);
chainAnnosPerTyep.putIfAbsent(type, new TreeMap<>());
if (chainAnnosPerTyep.get(type).get(chainNo) != null
&& chainAnnosPerTyep.get(type).get(chainNo).get(LinkNo) != null) {
continue;
}
String refRel = mAnno.split("->")[0];
refRel = getEscapeChars(refRel);
annos.get(i).setFeatureValueFromString(feat, refRel);
chainAnnosPerTyep.putIfAbsent(type, new TreeMap<>());
chainAnnosPerTyep.get(type).putIfAbsent(chainNo, new TreeMap<>());
chainAnnosPerTyep.get(type).get(chainNo).put(LinkNo, annos.get(i));
} else if (feat.getShortName().equals(REF_LINK)) {
mAnno = getEscapeChars(mAnno);
annos.get(i).setFeatureValueFromString(feat, mAnno);
aJCas.addFsToIndexes(annos.get(i));
}
else if (depFeatures.get(type) != null && depFeatures.get(type).equals(feat)) {
int g = depRef.isEmpty() ? 1 : Integer.valueOf(depRef.split("_")[0]);
int d = depRef.isEmpty() ? 1 : Integer.valueOf(depRef.split("_")[1]);
Type depType = depTypess.get(type);
AnnotationUnit govUnit = token2Units.get(mAnno);
AnnotationFS govFs;
AnnotationFS depFs;
if (depType.getName().equals(POS.class.getName())) {
depType = aJCas.getCas().getTypeSystem().getType(Token.class.getName());
govFs = units2Tokens.get(govUnit);
depFs = units2Tokens.get(unit);
} else {
govFs = aAnnosPerTypePerUnit.get(depType).get(govUnit).get(g - 1);
depFs = aAnnosPerTypePerUnit.get(depType).get(unit).get(d - 1);
}
annos.get(i).setFeatureValue(feat, depFs);
annos.get(i).setFeatureValue(type.getFeatureByBaseName(GOVERNOR), govFs);
if (depFs.getBegin() <= annos.get(i).getBegin()) {
Feature beginF = type.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_BEGIN);
annos.get(i).setIntValue(beginF, depFs.getBegin());
} else {
Feature endF = type.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_END);
annos.get(i).setIntValue(endF, depFs.getEnd());
}
} else {
mAnno = getEscapeChars(mAnno);
annos.get(i).setFeatureValueFromString(feat, mAnno);
aJCas.addFsToIndexes(annos.get(i));
setAnnoRefPerUnit(unit, type, ref, annos.get(i));
}
}
}
if (type.getName().equals(POS.class.getName())) {
units2Tokens.get(unit).setPos((POS) annos.get(i));
}
if (type.getName().equals(Lemma.class.getName())) {
units2Tokens.get(unit).setLemma((Lemma) annos.get(i));
}
i++;
}
if(targetAdd){
linkFSesPerSlotAnno = new HashMap<>();
}
}
j++;
}
}
}
}
private String getEscapeChars(String aAnno) {
if(aAnno==null){
return null;
}
return unescapeJava(aAnno);
}
/**
* update a base annotation with slot annotations
*
* @param linkFSesPerAnno
* contains list of slot annotations per a base annotation
* @param aLinkeF
* The link slot annotation feature
*/
private void addSlotAnnotations(Map<AnnotationFS, List<FeatureStructure>> linkFSesPerAnno, Feature aLinkeF) {
for (AnnotationFS anno : linkFSesPerAnno.keySet()) {
ArrayFS array = anno.getCAS().createArrayFS(linkFSesPerAnno.get(anno).size());
array.copyFromArray(
linkFSesPerAnno.get(anno).toArray(new FeatureStructure[linkFSesPerAnno.get(anno).size()]), 0, 0,
linkFSesPerAnno.get(anno).size());
anno.setFeatureValue(aLinkeF, array);
anno.getCAS().addFsToIndexes(anno);
}
}
/**
     * Gets annotations from lines (of {@link AnnotationUnit}s) and saves them
     * for later access while reading the document the first time. <br>
*
* @param lines
* TSV lines exported from WebAnno
* @param unit
* the annotation unit (Token or sub-tokens)
* @param ind
* index of the annotation, from the TAB separated annotations in
* the TSV lines
*/
private void setAnnosPerTypePerUnit(String[] lines, AnnotationUnit unit, int ind) {
for (Type type : allLayers.keySet()) {
annotationsPerPostion.putIfAbsent(type, new LinkedHashMap<>());
for (Feature f : allLayers.get(type)) {
annotationsPerPostion.get(type).put(unit,
annotationsPerPostion.get(type).getOrDefault(unit, new ArrayList<>()));
annotationsPerPostion.get(type).get(unit).add(lines[ind]);
ind++;
}
}
}
private void setAnnosPerUnit(JCas aJCas, Map<Type, Map<AnnotationUnit, List<AnnotationFS>>> aAnnosPerTypePerUnit) {
for (Type type : annotationsPerPostion.keySet()) {
Map<AnnotationUnit, List<AnnotationFS>> annosPerUnit = new HashMap<>();
for (AnnotationUnit unit : annotationsPerPostion.get(type).keySet()) {
int begin = unit.begin;
int end = unit.end;
List<AnnotationFS> annos = new ArrayList<>();
// if there are multiple annos
int multAnnos = 1;
for (String anno : annotationsPerPostion.get(type).get(unit)) {
String stackedAnnoRegex = "(?<!\\\\)" + Pattern.quote("||");
if (anno.split(stackedAnnoRegex).length > multAnnos) {
multAnnos = anno.split(stackedAnnoRegex).length;
}
}
for (int i = 0; i < multAnnos; i++) {
annos.add(aJCas.getCas().createAnnotation(type, begin, end));
}
annosPerUnit.put(unit, annos);
}
aAnnosPerTypePerUnit.put(type, annosPerUnit);
}
}
private void setAnnoRefPerUnit(AnnotationUnit unit, Type type, int ref, AnnotationFS aAnnoFs) {
annoUnitperAnnoFs.putIfAbsent(type, new HashMap<>());
annoUnitperAnnoFs.get(type).putIfAbsent(unit, new HashMap<>());
annoUnitperAnnoFs.get(type).get(unit).put(ref, aAnnoFs);
}
private AnnotationUnit createTokens(JCas aJCas, String[] lines, int begin, int end) {
if (!lines[0].startsWith("-")) {
Token token = new Token(aJCas, begin, end);
AnnotationUnit unit = new AnnotationUnit(begin, end, false, "");
units.add(unit);
token.addToIndexes();
token2Units.put(lines[0], unit);
units2Tokens.put(unit, token);
return unit;
} else {
AnnotationUnit unit = new AnnotationUnit(begin, end, true, "");
units.add(unit);
token2Units.put(lines[0], unit);
return unit;
}
}
private void createSentence(JCas aJCas, String line) {
String text = line.substring(6);
String beginEnd = text.substring(0, text.indexOf("#"));
text = text.substring(text.indexOf("#") + 1);
int begin = Integer.parseInt(beginEnd.split("-")[0]);
int end = Integer.parseInt(beginEnd.split("-")[1]);
coveredText.append(text + LF);
Sentence sentence = new Sentence(aJCas, begin, end);
sentence.addToIndexes();
}
/**
* Get the type and feature information from the TSV file header
*
* @param aJcas
* @param header
* the header line
* @throws IOException
* If the type or the feature do not exist in the CAs
*/
private void setLayerAndFeature(JCas aJcas, String header) throws IOException {
try {
StringTokenizer headerTk = new StringTokenizer(header, "#");
while (headerTk.hasMoreTokens()) {
String layerNames = headerTk.nextToken().trim();
StringTokenizer layerTk = new StringTokenizer(layerNames, "|");
Set<Feature> features = new LinkedHashSet<Feature>();
String layerName = layerTk.nextToken().trim();
layerName = layerName.substring(layerName.indexOf("=") + 1);
Iterator<Type> types = aJcas.getTypeSystem().getTypeIterator();
boolean layerExists = false;
while (types.hasNext()) {
if (types.next().getName().equals(layerName)) {
layerExists = true;
break;
}
}
if (!layerExists) {
throw new IOException(fileName + " This is not a valid TSV File. The layer " + layerName
+ " is not created in the project.");
}
Type layer = CasUtil.getType(aJcas.getCas(), layerName);
while (layerTk.hasMoreTokens()) {
String ft = layerTk.nextToken().trim();
columns++;
Feature feature;
if (ft.startsWith(BT)) {
feature = layer.getFeatureByBaseName(DEPENDENT);
depFeatures.put(layer, feature);
depTypess.put(layer, CasUtil.getType(aJcas.getCas(), ft.substring(3)));
} else {
feature = layer.getFeatureByBaseName(ft);
}
if (ft.startsWith(ROLE)) {
ft = ft.substring(5);
String t = layerTk.nextToken().toString();
columns++;
Type tType = CasUtil.getType(aJcas.getCas(), t);
String fName = ft.substring(0, ft.indexOf("_"));
Feature slotF = layer.getFeatureByBaseName(fName.substring(fName.indexOf(":") + 1));
if (slotF == null) {
throw new IOException(fileName + " This is not a valid TSV File. The feature " + ft
+ " is not created for the layer " + layerName);
}
features.add(slotF);
roleLinks.put(slotF, tType);
Type slotType = CasUtil.getType(aJcas.getCas(), ft.substring(ft.indexOf("_") + 1));
Feature tFeatore = slotType.getFeatureByBaseName("target");
if (tFeatore == null) {
throw new IOException(fileName + " This is not a valid TSV File. The feature " + ft
+ " is not created for the layer " + layerName);
}
roleTargets.put(tFeatore, tType);
features.add(tFeatore);
slotLinkTypes.put(slotF, slotType);
continue;
}
if (feature == null) {
throw new IOException(fileName + " This is not a valid TSV File. The feature " + ft
+ " is not created for the layer " + layerName);
}
features.add(feature);
}
allLayers.put(layer, features);
layerMaps.put(layerMaps.size()+1, layer);
}
} catch (Exception e) {
throw new IOException(e.getMessage() + "\nTSV header:\n" + header);
}
}
public static final String PARAM_ENCODING = ComponentParameters.PARAM_SOURCE_ENCODING;
@ConfigurationParameter(name = PARAM_ENCODING, mandatory = true, defaultValue = "UTF-8")
private String encoding;
@Override
public void getNext(JCas aJCas) throws IOException, CollectionException {
Resource res = nextFile();
initCas(aJCas, res);
InputStream is = null;
try {
is = res.getInputStream();
convertToCas(aJCas, is, encoding);
} finally {
closeQuietly(is);
}
}
}
| webanno-tsv/src/main/java/de/tudarmstadt/ukp/clarin/webanno/tsv/WebannoTsv3Reader.java | /*******************************************************************************
* Copyright 2012
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.tsv;
import static org.apache.commons.io.IOUtils.closeQuietly;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeMap;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.apache.commons.lang.StringUtils;
import org.apache.uima.cas.ArrayFS;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.collection.CollectionException;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.util.CasUtil;
import org.apache.uima.jcas.JCas;
import de.tudarmstadt.ukp.clarin.webanno.tsv.util.AnnotationUnit;
import de.tudarmstadt.ukp.dkpro.core.api.io.JCasResourceCollectionReader_ImplBase;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.POS;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData;
import de.tudarmstadt.ukp.dkpro.core.api.parameter.ComponentParameters;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Lemma;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
/**
 * This class reads WebAnno compatible TSV files and creates annotations from
 * the information provided. The header of the file records the existing
 * annotation layers with their feature names.<br>
 * If the annotation type or a feature in the type does not exist in the CAS, it
 * throws an error.<br>
 * Span types start with the prefix <b> #T_SP=</b>. <br>
 * Relation types start with the prefix <b> #T_RL=</b>. <br>
 * Chain types start with the prefix <b> #T_CH=</b>. <br>
 * Slot features start with the prefix <b> ROLE_</b>. <br>
 * All features of a type follow the name, separated by the <b>|</b> character.
* <br>
*/
public class WebannoTsv3Reader extends JCasResourceCollectionReader_ImplBase {
private static final String TAB = "\t";
private static final String LF = "\n";
private static final String REF_REL = "referenceRelation";
private static final String REF_LINK = "referenceType";
private static final String CHAIN = "Chain";
private static final String FIRST = "first";
private static final String NEXT = "next";
public static final String ROLE = "ROLE_";
public static final String BT = "BT_"; // base type for the relation
// annotation
// If | is used as annotation, escape it with `|` and replace it with WEBANNO_BAR for processing
// WEBANNO_BAR will be a reserved word, the same for [, ], and _
private static String WEBANNO_BAR = "WEBANNOBAR";
private static String WEBANNO_RBR = "WEBANNORBR";
private static String WEBANNO_LBR = "WEBANNOLBR";
private static String WEBANNO_UNDERSCORE = "WEBANNOUNDERSCORE";
private static final String DEPENDENT = "Dependent";
private static final String GOVERNOR = "Governor";
private String fileName;
private int columns = 2;// token number + token columns (minimum required)
private Map<Type, Set<Feature>> allLayers = new LinkedHashMap<Type, Set<Feature>>();
private Map<Feature, Type> roleLinks = new HashMap<>();
private Map<Feature, Type> roleTargets = new HashMap<>();
private Map<Feature, Type> slotLinkTypes = new HashMap<>();
private StringBuilder coveredText = new StringBuilder();
// for each type, for each unit, annotations per position
private Map<Type, Map<AnnotationUnit, List<String>>> annotationsPerPostion = new LinkedHashMap<>();
private Map<Type, Map<Integer, Map<Integer, AnnotationFS>>> chainAnnosPerTyep = new HashMap<>();
private List<AnnotationUnit> units = new ArrayList<>();
private Map<String, AnnotationUnit> token2Units = new HashMap<>();
private Map<AnnotationUnit, Token> units2Tokens = new HashMap<>();
private Map<Integer, Type> layerMaps = new LinkedHashMap<>();
private Map<Type, Feature> depFeatures = new HashMap<>();
private Map<Type, Type> depTypess = new HashMap<>();
// record the annotation at ref position when it is multiple token
// annotation
private Map<Type, Map<AnnotationUnit, Map<Integer, AnnotationFS>>> annoUnitperAnnoFs = new HashMap<>();
public void convertToCas(JCas aJCas, InputStream aIs, String aEncoding) throws IOException
{
DocumentMetaData documentMetadata = DocumentMetaData.get(aJCas);
fileName = documentMetadata.getDocumentTitle();
// setLayerAndFeature(aJCas, aIs, aEncoding);
setAnnotations(aJCas, aIs, aEncoding);
aJCas.setDocumentText(coveredText.toString());
}
/**
* Iterate through lines and create span annotations accordingly. For
* multiple span annotation, based on the position of the annotation in the
* line, update only the end position of the annotation
*/
private void setAnnotations(JCas aJCas, InputStream aIs, String aEncoding) throws IOException {
// getting header information
LineIterator lineIterator = IOUtils.lineIterator(aIs, aEncoding);
while (lineIterator.hasNext()) {
String line = lineIterator.next().trim();
if (line.startsWith("#T_")) {
setLayerAndFeature(aJCas, line);
continue;
}
if (line.startsWith("#Text=")) {
createSentence(aJCas, line);
continue;
}
if (line.startsWith("#FORMAT=")) {
continue;
}
if (line.trim().isEmpty()) {
continue;
}
// replace the `|` with WEBANNOBAR
line = replaceEscapeChars(line);
int count = StringUtils.countMatches(line, "\t");
if (columns != count) {
throw new IOException(fileName + " This is not a valid TSV File. check this line: " + line);
}
String[] lines = line.split(TAB);
int begin = Integer.parseInt(lines[1].split("-")[0]);
int end = Integer.parseInt(lines[1].split("-")[1]);
AnnotationUnit unit = createTokens(aJCas, lines, begin, end);
int ind = 3;
setAnnosPerTypePerUnit(lines, unit, ind);
}
Map<Type, Map<AnnotationUnit, List<AnnotationFS>>> annosPerTypePerUnit = new HashMap<>();
setAnnosPerUnit(aJCas, annosPerTypePerUnit);
addAnnotations(aJCas, annosPerTypePerUnit);
addChainAnnotations(aJCas);
}
private String replaceEscapeChars(String line) {
// because these characters are used to separate multiple annotations, empty annotations...
line = line.replace("\\|", WEBANNO_BAR).replace("\\_", WEBANNO_UNDERSCORE)
.replace("\\[", WEBANNO_RBR).replace("\\]", WEBANNO_LBR);
return line;
}
/**
* The individual link annotations are stored in a {@link TreeMap}
* (chainAnnosPerTye) with chain number and link number references, sorted
* in an ascending order <br>
* Iterate over each chain number and link number references and construct
* the chain
*
* @param aJCas
*/
private void addChainAnnotations(JCas aJCas) {
for (Type linkType : chainAnnosPerTyep.keySet()) {
for (int chainNo : chainAnnosPerTyep.get(linkType).keySet()) {
Type chainType = aJCas.getCas().getTypeSystem()
.getType(linkType.getName().substring(0, linkType.getName().length() - 4) + CHAIN);
Feature firstF = chainType.getFeatureByBaseName(FIRST);
Feature nextF = linkType.getFeatureByBaseName(NEXT);
FeatureStructure chain = aJCas.getCas().createFS(chainType);
aJCas.addFsToIndexes(chain);
AnnotationFS firstFs = chainAnnosPerTyep.get(linkType).get(chainNo).get(1);
AnnotationFS linkFs = firstFs;
chain.setFeatureValue(firstF, firstFs);
for (int i = 2; i <= chainAnnosPerTyep.get(linkType).get(chainNo).size(); i++) {
linkFs.setFeatureValue(nextF, chainAnnosPerTyep.get(linkType).get(chainNo).get(i));
linkFs = chainAnnosPerTyep.get(linkType).get(chainNo).get(i);
}
}
}
}
/**
* Importing span annotations including slot annotations
*
* @param aJCas
* @param aAnnosPerTypePerUnit
*/
private void addAnnotations(JCas aJCas, Map<Type, Map<AnnotationUnit, List<AnnotationFS>>> aAnnosPerTypePerUnit) {
for (Type type : annotationsPerPostion.keySet()) {
Map<Integer, AnnotationFS> multiTokUnits = new HashMap<>();
for (AnnotationUnit unit : annotationsPerPostion.get(type).keySet()) {
int end = unit.end;
List<AnnotationFS> annos = aAnnosPerTypePerUnit.get(type).get(unit);
int j = 0;
Feature linkeF = null;
Map<AnnotationFS, List<FeatureStructure>> linkFSesPerSlotAnno = new HashMap<>();
for (Feature feat : allLayers.get(type)) {
String anno = annotationsPerPostion.get(type).get(unit).get(j);
if (!anno.equals("_")) {
int i = 0;
// if it is a slot annotation (multiple slots per
// single annotation
// (Target1<--role1--Base--role2-->Target2)
int slot = 0;
boolean targetAdd = false;
for (String mAnnos : anno.split("\\|\\|")) {
for (String mAnno : mAnnos.split("\\|")) {
int ref = 1;
String depRef = "";
if (mAnno.endsWith("]")) {
depRef = mAnno.substring(mAnno.indexOf("[") + 1, mAnno.length() - 1);
ref = depRef.contains("_") ? 1
: Integer.valueOf(
mAnno.substring(mAnno.indexOf("[") + 1, mAnno.length() - 1));
mAnno = mAnno.substring(0, mAnno.indexOf("["));
}
if (mAnno.startsWith("B-")) {
multiTokUnits.put(ref, annos.get(i));
mAnno = mAnno.substring(2);
}
if (mAnno.startsWith("I-")) {
Feature endF = type.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_END);
multiTokUnits.get(ref).setIntValue(endF, end);
if (feat.getShortName().equals(REF_LINK)) {
// since REF_REL do not start with BIO,
// update it it...
annos.set(i, multiTokUnits.get(ref));
}
setAnnoRefPerUnit(unit, type, ref, multiTokUnits.get(ref));
} else {
if (mAnno.equals(feat.getName())) {
mAnno = null;
}
if (roleLinks.containsKey(feat)) {
linkeF = feat;
FeatureStructure link = aJCas.getCas().createFS(slotLinkTypes.get(feat));
Feature roleFeat = link.getType().getFeatureByBaseName("role");
mAnno = getEscapeChars(mAnno);
link.setStringValue(roleFeat, mAnno);
linkFSesPerSlotAnno.putIfAbsent(annos.get(i), new ArrayList<>());
linkFSesPerSlotAnno.get(annos.get(i)).add(link);
} else if (roleTargets.containsKey(feat)) {
FeatureStructure link = linkFSesPerSlotAnno.get(annos.get(i)).get(slot);
int customTypeNumber = 0;
if(mAnno.split("-").length>2){
customTypeNumber =Integer.valueOf(mAnno.substring(mAnno.lastIndexOf("-")+1));
mAnno = mAnno.substring(0,mAnno.lastIndexOf("-"));
}
AnnotationUnit targetUnit = token2Units.get(mAnno);
Type tType = null;
if (customTypeNumber == 0){
tType = roleTargets.get(feat);
}
else{
tType = layerMaps.get(customTypeNumber);
}
AnnotationFS targetFs = aAnnosPerTypePerUnit.get(tType)
.get(targetUnit).get(ref - 1);
link.setFeatureValue(feat, targetFs);
addSlotAnnotations(linkFSesPerSlotAnno, linkeF);
targetAdd = true;
slot++;
} else if (feat.getShortName().equals(REF_REL)) {
int chainNo = Integer.valueOf(mAnno.split("->")[1].split("-")[0]);
int LinkNo = Integer.valueOf(mAnno.split("->")[1].split("-")[1]);
chainAnnosPerTyep.putIfAbsent(type, new TreeMap<>());
if (chainAnnosPerTyep.get(type).get(chainNo) != null
&& chainAnnosPerTyep.get(type).get(chainNo).get(LinkNo) != null) {
continue;
}
String refRel = mAnno.split("->")[0];
refRel = getEscapeChars(refRel);
annos.get(i).setFeatureValueFromString(feat, refRel);
chainAnnosPerTyep.putIfAbsent(type, new TreeMap<>());
chainAnnosPerTyep.get(type).putIfAbsent(chainNo, new TreeMap<>());
chainAnnosPerTyep.get(type).get(chainNo).put(LinkNo, annos.get(i));
} else if (feat.getShortName().equals(REF_LINK)) {
mAnno = getEscapeChars(mAnno);
annos.get(i).setFeatureValueFromString(feat, mAnno);
aJCas.addFsToIndexes(annos.get(i));
}
else if (depFeatures.get(type) != null && depFeatures.get(type).equals(feat)) {
int g = depRef.isEmpty() ? 1 : Integer.valueOf(depRef.split("_")[0]);
int d = depRef.isEmpty() ? 1 : Integer.valueOf(depRef.split("_")[1]);
Type depType = depTypess.get(type);
AnnotationUnit govUnit = token2Units.get(mAnno);
AnnotationFS govFs;
AnnotationFS depFs;
if (depType.getName().equals(POS.class.getName())) {
depType = aJCas.getCas().getTypeSystem().getType(Token.class.getName());
govFs = units2Tokens.get(govUnit);
depFs = units2Tokens.get(unit);
} else {
govFs = aAnnosPerTypePerUnit.get(depType).get(govUnit).get(g - 1);
depFs = aAnnosPerTypePerUnit.get(depType).get(unit).get(d - 1);
}
annos.get(i).setFeatureValue(feat, depFs);
annos.get(i).setFeatureValue(type.getFeatureByBaseName(GOVERNOR), govFs);
if (depFs.getBegin() <= annos.get(i).getBegin()) {
Feature beginF = type.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_BEGIN);
annos.get(i).setIntValue(beginF, depFs.getBegin());
} else {
Feature endF = type.getFeatureByBaseName(CAS.FEATURE_BASE_NAME_END);
annos.get(i).setIntValue(endF, depFs.getEnd());
}
} else {
mAnno = getEscapeChars(mAnno);
annos.get(i).setFeatureValueFromString(feat, mAnno);
aJCas.addFsToIndexes(annos.get(i));
setAnnoRefPerUnit(unit, type, ref, annos.get(i));
}
}
}
if (type.getName().equals(POS.class.getName())) {
units2Tokens.get(unit).setPos((POS) annos.get(i));
}
if (type.getName().equals(Lemma.class.getName())) {
units2Tokens.get(unit).setLemma((Lemma) annos.get(i));
}
i++;
}
if(targetAdd){
linkFSesPerSlotAnno = new HashMap<>();
}
}
j++;
}
}
}
}
private String getEscapeChars(String aAnno) {
if(aAnno==null){
return null;
}
return aAnno.replace(WEBANNO_BAR, "|").replace(WEBANNO_UNDERSCORE, "_")
.replace(WEBANNO_RBR, "[").replace(WEBANNO_LBR, "]");
}
/**
* update a base annotation with slot annotations
*
* @param linkFSesPerAnno
* contains list of slot annotations per a base annotation
* @param aLinkeF
* The link slot annotation feature
*/
private void addSlotAnnotations(Map<AnnotationFS, List<FeatureStructure>> linkFSesPerAnno, Feature aLinkeF) {
for (AnnotationFS anno : linkFSesPerAnno.keySet()) {
ArrayFS array = anno.getCAS().createArrayFS(linkFSesPerAnno.get(anno).size());
array.copyFromArray(
linkFSesPerAnno.get(anno).toArray(new FeatureStructure[linkFSesPerAnno.get(anno).size()]), 0, 0,
linkFSesPerAnno.get(anno).size());
anno.setFeatureValue(aLinkeF, array);
anno.getCAS().addFsToIndexes(anno);
}
}
/**
* Gets annotations from lines (of {@link AnnotationUnit}s) and save for the
* later access, while reading the document the first time. <br>
*
* @param lines
* TSV lines exported from WebAnno
* @param unit
* the annotation unit (Token or sub-tokens)
* @param ind
* index of the annotation, from the TAB separated annotations in
* the TSV lines
*/
private void setAnnosPerTypePerUnit(String[] lines, AnnotationUnit unit, int ind) {
for (Type type : allLayers.keySet()) {
annotationsPerPostion.putIfAbsent(type, new LinkedHashMap<>());
for (Feature f : allLayers.get(type)) {
annotationsPerPostion.get(type).put(unit,
annotationsPerPostion.get(type).getOrDefault(unit, new ArrayList<>()));
annotationsPerPostion.get(type).get(unit).add(lines[ind]);
ind++;
}
}
}
private void setAnnosPerUnit(JCas aJCas, Map<Type, Map<AnnotationUnit, List<AnnotationFS>>> aAnnosPerTypePerUnit) {
for (Type type : annotationsPerPostion.keySet()) {
Map<AnnotationUnit, List<AnnotationFS>> annosPerUnit = new HashMap<>();
for (AnnotationUnit unit : annotationsPerPostion.get(type).keySet()) {
int begin = unit.begin;
int end = unit.end;
List<AnnotationFS> annos = new ArrayList<>();
// if there are multiple annos
int multAnnos = 1;
for (String anno : annotationsPerPostion.get(type).get(unit)) {
if (anno.split("\\|\\|").length > multAnnos) {
multAnnos = anno.split("\\|\\|").length;
}
}
for (int i = 0; i < multAnnos; i++) {
annos.add(aJCas.getCas().createAnnotation(type, begin, end));
}
annosPerUnit.put(unit, annos);
}
aAnnosPerTypePerUnit.put(type, annosPerUnit);
}
}
private void setAnnoRefPerUnit(AnnotationUnit unit, Type type, int ref, AnnotationFS aAnnoFs) {
annoUnitperAnnoFs.putIfAbsent(type, new HashMap<>());
annoUnitperAnnoFs.get(type).putIfAbsent(unit, new HashMap<>());
annoUnitperAnnoFs.get(type).get(unit).put(ref, aAnnoFs);
}
private AnnotationUnit createTokens(JCas aJCas, String[] lines, int begin, int end) {
if (!lines[0].startsWith("-")) {
Token token = new Token(aJCas, begin, end);
AnnotationUnit unit = new AnnotationUnit(begin, end, false, "");
units.add(unit);
token.addToIndexes();
token2Units.put(lines[0], unit);
units2Tokens.put(unit, token);
return unit;
} else {
AnnotationUnit unit = new AnnotationUnit(begin, end, true, "");
units.add(unit);
token2Units.put(lines[0], unit);
return unit;
}
}
private void createSentence(JCas aJCas, String line) {
String text = line.substring(6);
String beginEnd = text.substring(0, text.indexOf("#"));
text = text.substring(text.indexOf("#") + 1);
int begin = Integer.parseInt(beginEnd.split("-")[0]);
int end = Integer.parseInt(beginEnd.split("-")[1]);
coveredText.append(text + LF);
Sentence sentence = new Sentence(aJCas, begin, end);
sentence.addToIndexes();
}
/**
* Get the type and feature information from the TSV file header
*
* @param aJcas
* @param header
* the header line
* @throws IOException
* If the type or the feature do not exist in the CAs
*/
private void setLayerAndFeature(JCas aJcas, String header) throws IOException {
try {
StringTokenizer headerTk = new StringTokenizer(header, "#");
while (headerTk.hasMoreTokens()) {
String layerNames = headerTk.nextToken().trim();
StringTokenizer layerTk = new StringTokenizer(layerNames, "|");
Set<Feature> features = new LinkedHashSet<Feature>();
String layerName = layerTk.nextToken().trim();
layerName = layerName.substring(layerName.indexOf("=") + 1);
Iterator<Type> types = aJcas.getTypeSystem().getTypeIterator();
boolean layerExists = false;
while (types.hasNext()) {
if (types.next().getName().equals(layerName)) {
layerExists = true;
break;
}
}
if (!layerExists) {
throw new IOException(fileName + " This is not a valid TSV File. The layer " + layerName
+ " is not created in the project.");
}
Type layer = CasUtil.getType(aJcas.getCas(), layerName);
while (layerTk.hasMoreTokens()) {
String ft = layerTk.nextToken().trim();
columns++;
Feature feature;
if (ft.startsWith(BT)) {
feature = layer.getFeatureByBaseName(DEPENDENT);
depFeatures.put(layer, feature);
depTypess.put(layer, CasUtil.getType(aJcas.getCas(), ft.substring(3)));
} else {
feature = layer.getFeatureByBaseName(ft);
}
if (ft.startsWith(ROLE)) {
ft = ft.substring(5);
String t = layerTk.nextToken().toString();
columns++;
Type tType = CasUtil.getType(aJcas.getCas(), t);
String fName = ft.substring(0, ft.indexOf("_"));
Feature slotF = layer.getFeatureByBaseName(fName.substring(fName.indexOf(":") + 1));
if (slotF == null) {
throw new IOException(fileName + " This is not a valid TSV File. The feature " + ft
+ " is not created for the layer " + layerName);
}
features.add(slotF);
roleLinks.put(slotF, tType);
Type slotType = CasUtil.getType(aJcas.getCas(), ft.substring(ft.indexOf("_") + 1));
Feature tFeatore = slotType.getFeatureByBaseName("target");
if (tFeatore == null) {
throw new IOException(fileName + " This is not a valid TSV File. The feature " + ft
+ " is not created for the layer " + layerName);
}
roleTargets.put(tFeatore, tType);
features.add(tFeatore);
slotLinkTypes.put(slotF, slotType);
continue;
}
if (feature == null) {
throw new IOException(fileName + " This is not a valid TSV File. The feature " + ft
+ " is not created for the layer " + layerName);
}
features.add(feature);
}
allLayers.put(layer, features);
layerMaps.put(layerMaps.size()+1, layer);
}
} catch (Exception e) {
throw new IOException(e.getMessage() + "\nTSV header:\n" + header);
}
}
public static final String PARAM_ENCODING = ComponentParameters.PARAM_SOURCE_ENCODING;
@ConfigurationParameter(name = PARAM_ENCODING, mandatory = true, defaultValue = "UTF-8")
private String encoding;
@Override
public void getNext(JCas aJCas) throws IOException, CollectionException {
Resource res = nextFile();
initCas(aJCas, res);
InputStream is = null;
try {
is = res.getInputStream();
convertToCas(aJCas, is, encoding);
} finally {
closeQuietly(is);
}
}
}
| #176 - escape special characters for TSV import/Export | webanno-tsv/src/main/java/de/tudarmstadt/ukp/clarin/webanno/tsv/WebannoTsv3Reader.java | #176 - escape special characters for TSV import/Export | <ide><path>ebanno-tsv/src/main/java/de/tudarmstadt/ukp/clarin/webanno/tsv/WebannoTsv3Reader.java
<ide> package de.tudarmstadt.ukp.clarin.webanno.tsv;
<ide>
<ide> import static org.apache.commons.io.IOUtils.closeQuietly;
<add>import static org.apache.commons.lang.StringEscapeUtils.unescapeJava;
<ide>
<ide> import java.io.IOException;
<ide> import java.io.InputStream;
<ide> import java.util.Set;
<ide> import java.util.StringTokenizer;
<ide> import java.util.TreeMap;
<add>import java.util.regex.Pattern;
<ide>
<ide> import org.apache.commons.io.IOUtils;
<ide> import org.apache.commons.io.LineIterator;
<ide> public static final String ROLE = "ROLE_";
<ide> public static final String BT = "BT_"; // base type for the relation
<ide> // annotation
<del> // If | is used as annotation, escape it with `|` and replace it with WEBANNO_BAR for processing
<del> // WEBANNO_BAR will be a reserved word, the same for [, ], and _
<del> private static String WEBANNO_BAR = "WEBANNOBAR";
<del> private static String WEBANNO_RBR = "WEBANNORBR";
<del> private static String WEBANNO_LBR = "WEBANNOLBR";
<del> private static String WEBANNO_UNDERSCORE = "WEBANNOUNDERSCORE";
<del>
<ide> private static final String DEPENDENT = "Dependent";
<ide> private static final String GOVERNOR = "Governor";
<ide>
<ide> if (line.trim().isEmpty()) {
<ide> continue;
<ide> }
<del>
<del> // replace the `|` with WEBANNOBAR
<del> line = replaceEscapeChars(line);
<ide>
<ide> int count = StringUtils.countMatches(line, "\t");
<ide>
<ide> if (columns != count) {
<ide> throw new IOException(fileName + " This is not a valid TSV File. check this line: " + line);
<ide> }
<del> String[] lines = line.split(TAB);
<del>
<add>
<add> String regex = "(?<!\\\\)" + Pattern.quote(TAB);
<add> String[] lines = line.split(regex);
<add>
<ide> int begin = Integer.parseInt(lines[1].split("-")[0]);
<ide> int end = Integer.parseInt(lines[1].split("-")[1]);
<ide>
<ide> setAnnosPerUnit(aJCas, annosPerTypePerUnit);
<ide> addAnnotations(aJCas, annosPerTypePerUnit);
<ide> addChainAnnotations(aJCas);
<del> }
<del>
<del> private String replaceEscapeChars(String line) {
<del> // because these characters are used to separate multiple annotations, empty annotations...
<del> line = line.replace("\\|", WEBANNO_BAR).replace("\\_", WEBANNO_UNDERSCORE)
<del> .replace("\\[", WEBANNO_RBR).replace("\\]", WEBANNO_LBR);
<del> return line;
<ide> }
<ide>
<ide> /**
<ide> // (Target1<--role1--Base--role2-->Target2)
<ide> int slot = 0;
<ide> boolean targetAdd = false;
<del> for (String mAnnos : anno.split("\\|\\|")) {
<del> for (String mAnno : mAnnos.split("\\|")) {
<add> String stackedAnnoRegex = "(?<!\\\\)" + Pattern.quote("||");
<add> for (String mAnnos : anno.split(stackedAnnoRegex)) {
<add> String multipleAnnoRegex = "(?<!\\\\)" + Pattern.quote("|");
<add> for (String mAnno : mAnnos.split(multipleAnnoRegex)) {
<ide> int ref = 1;
<ide> String depRef = "";
<ide> if (mAnno.endsWith("]")) {
<ide> if(aAnno==null){
<ide> return null;
<ide> }
<del> return aAnno.replace(WEBANNO_BAR, "|").replace(WEBANNO_UNDERSCORE, "_")
<del> .replace(WEBANNO_RBR, "[").replace(WEBANNO_LBR, "]");
<add>
<add> return unescapeJava(aAnno);
<ide> }
<ide>
<ide> /**
<ide> // if there are multiple annos
<ide> int multAnnos = 1;
<ide> for (String anno : annotationsPerPostion.get(type).get(unit)) {
<del>
<del> if (anno.split("\\|\\|").length > multAnnos) {
<del> multAnnos = anno.split("\\|\\|").length;
<add> String stackedAnnoRegex = "(?<!\\\\)" + Pattern.quote("||");
<add> if (anno.split(stackedAnnoRegex).length > multAnnos) {
<add> multAnnos = anno.split(stackedAnnoRegex).length;
<ide> }
<ide> }
<ide> |
|
Java | mit | 2c9d6fe38c7130ee4b1802961e7d38570853c7de | 0 | Upinion/react-native-couchbase-lite,Upinion/react-native-couchbase-lite | package com.upinion.CouchBase;
import android.content.Intent;
import android.content.Context;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.JavascriptException;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import com.couchbase.lite.android.AndroidContext;
import com.couchbase.lite.CouchbaseLiteException;
import com.couchbase.lite.Database;
import com.couchbase.lite.DocumentChange;
import com.couchbase.lite.Manager;
import com.couchbase.lite.replicator.Replication;
import com.couchbase.lite.listener.LiteListener;
import com.couchbase.lite.listener.LiteServlet;
import com.couchbase.lite.listener.Credentials;
import com.couchbase.lite.router.URLStreamHandlerFactory;
import com.couchbase.lite.View;
import com.couchbase.lite.javascript.JavaScriptViewCompiler;
import com.couchbase.lite.util.Log;
import com.couchbase.lite.auth.Authenticator;
import com.couchbase.lite.auth.AuthenticatorFactory;
import com.couchbase.lite.replicator.RemoteRequestResponseException;
import java.util.Map;
import java.util.HashMap;
import java.io.IOException;
import java.net.URL;
public class CouchBase extends ReactContextBaseJavaModule {
private ReactApplicationContext context;
private Manager managerServer;
private boolean initFailed = false;
private int listenPort;
protected Boolean isDebug = false;
private static final String PUSH_EVENT_KEY = "couchBasePushEvent";
private static final String PULL_EVENT_KEY = "couchBasePullEvent";
private static final String DB_EVENT_KEY = "couchBaseDBEvent";
private static final String AUTH_ERROR_KEY = "couchbBaseAuthError";
public static final String TAG = "CouchBase";
/**
* Constructor for the Native Module
     * @param reactContext React context object to communicate with React-native
*/
public CouchBase(ReactApplicationContext reactContext) {
super(reactContext);
this.context = reactContext;
// Register the JavaScript view compiler
View.setCompiler(new JavaScriptViewCompiler());
}
/**
* Returns the name of this module in React-native (javascript)
*/
@Override
public String getName() {
return TAG;
}
/**
* Returns constants of this module in React-native to share (javascript)
*/
@Override
public Map<String, Object> getConstants() {
final Map<String, Object> constants = new HashMap<>();
constants.put("PUSH", PUSH_EVENT_KEY);
constants.put("PULL", PULL_EVENT_KEY);
constants.put("DBChanged", DB_EVENT_KEY);
constants.put("AuthError", AUTH_ERROR_KEY);
return constants;
}
/**
     * Function shared to React-native; it starts a local couchbase server.
     * @param  listen_port      Integer port to start server
     * @param  userLocal        String user for local server
     * @param  passwordLocal    String password for local server
     * @param  onEnd            Callback function to call when finished
*/
@ReactMethod
public void serverLocal(Integer listen_port, String userLocal, String passwordLocal, Callback onEnd) {
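        // Start the embedded Couchbase Lite listener and hand the port it is actually listening on back to JS.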
startServer(listen_port, userLocal, passwordLocal);
if(onEnd != null)
onEnd.invoke(this.listenPort);
}
/**
     * Function shared to React-native; it starts a local couchbase server and syncs it with a remote one.
* @param listen_port Integer port to start server
* @param userLocal String user for local server
* @param passwordLocal String password for local server
* @param databaseLocal String database for local server
* @param remoteURL String URL to remote couchbase
* @param remoteUser String user for remote server
* @param remotePassword String password for remote server
* @param events Boolean activate the events for push and pull
     * @param  onEnd            Callback function to call when finished
*/
@ReactMethod
public void serverLocalRemote(Integer listen_port, String userLocal, String passwordLocal, String databaseLocal,
String remoteURL, String remoteUser, String remotePassword, Boolean events,
Callback onEnd) {
startServer(listen_port, userLocal, passwordLocal);
Manager ss = this.managerServer;
if(!(databaseLocal != null && remoteURL != null && remoteUser != null && remotePassword != null))
throw new JavascriptException("CouchBase Server bad arguments");
try {
URL url = new URL(remoteURL);
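            // Create push and pull replications between the local database and the remote endpoint.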
Database db = ss.getDatabase(databaseLocal);
Replication push = db.createPushReplication(url);
Replication pull = db.createPullReplication(url);
pull.setContinuous(true);
push.setContinuous(true);
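            // Replicate continuously in both directions, authenticating with HTTP basic credentials.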
Authenticator basicAuthenticator = AuthenticatorFactory.createBasicAuthenticator(remoteUser, remotePassword);
pull.setAuthenticator(basicAuthenticator);
push.setAuthenticator(basicAuthenticator);
if (events) {
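                // Forward replication progress and document changes to JS; HTTP 401 responses are reported as AuthError events.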
push.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getError() != null) {
Throwable lastError = event.getError();
if (lastError instanceof RemoteRequestResponseException) {
RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
if (exception.getCode() == 401) {
// Authentication error
WritableMap eventError = Arguments.createMap();
sendEvent(context, AUTH_ERROR_KEY, eventError);
}
}
} else {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PUSH_EVENT_KEY, eventM);
}
}
});
pull.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getError() != null) {
Throwable lastError = event.getError();
if (lastError instanceof RemoteRequestResponseException) {
RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
if (exception.getCode() == 401) {
// Authentication error
WritableMap eventError = Arguments.createMap();
sendEvent(context, AUTH_ERROR_KEY, eventError);
}
}
} else {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PULL_EVENT_KEY, eventM);
}
}
});
db.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
for (DocumentChange dc : event.getChanges()) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getName());
eventM.putString("id", dc.getDocumentId());
sendEvent(context, DB_EVENT_KEY, eventM);
}
}
});
}
push.start();
pull.start();
if (onEnd != null)
onEnd.invoke(this.listenPort);
        } catch (Exception e) {
throw new JavascriptException(e.getMessage());
}
}
/**
     * Function shared to React-native; it starts syncing an already created local database with a remote one.
* @param databaseLocal String database for local server
* @param remoteURL String URL to remote couchbase
* @param remoteUser String user for remote server
* @param remotePassword String password for remote server
* @param events Boolean activate the events for push and pull
     * @param  onEnd            Callback function to call when finished
*/
@ReactMethod
public void serverRemote(String databaseLocal, String remoteURL, String remoteUser,
String remotePassword, Boolean events, Callback onEnd) {
Manager ss = this.managerServer;
if(ss == null)
throw new JavascriptException("CouchBase local server needs to be started first");
if(!(databaseLocal != null && remoteURL != null && remoteUser != null && remotePassword != null))
throw new JavascriptException("CouchBase Server bad arguments");
try {
URL url = new URL(remoteURL);
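            // The local database must already exist in the running manager; wire it to the remote endpoint.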
Database db = ss.getDatabase(databaseLocal);
Replication push = db.createPushReplication(url);
Replication pull = db.createPullReplication(url);
pull.setContinuous(true);
push.setContinuous(true);
Authenticator basicAuthenticator = AuthenticatorFactory.createBasicAuthenticator(remoteUser, remotePassword);
pull.setAuthenticator(basicAuthenticator);
push.setAuthenticator(basicAuthenticator);
if (events) {
push.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getError() != null) {
Throwable lastError = event.getError();
if (lastError instanceof RemoteRequestResponseException) {
RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
if (exception.getCode() == 401) {
// Authentication error
WritableMap eventError = Arguments.createMap();
sendEvent(context, AUTH_ERROR_KEY, eventError);
}
}
} else {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PUSH_EVENT_KEY, eventM);
}
}
});
pull.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getError() != null) {
Throwable lastError = event.getError();
if (lastError instanceof RemoteRequestResponseException) {
RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
if (exception.getCode() == 401) {
// Authentication error
WritableMap eventError = Arguments.createMap();
sendEvent(context, AUTH_ERROR_KEY, eventError);
}
}
} else {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PULL_EVENT_KEY, eventM);
}
}
});
db.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
for (DocumentChange dc : event.getChanges()) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getName());
eventM.putString("id", dc.getDocumentId());
sendEvent(context, DB_EVENT_KEY, eventM);
}
}
});
}
push.start();
pull.start();
if (onEnd != null)
onEnd.invoke();
}catch(Exception e){
throw new JavascriptException(e.getMessage());
}
}
/**
* Function to be shared to React-native, compacts an already created local database
* @param databaseLocal String database for local server
*/
@ReactMethod
public void compact(String databaseLocal) {
Manager ss = this.managerServer;
if(ss == null)
throw new JavascriptException("CouchBase local server needs to be started first");
if(databaseLocal == null)
throw new JavascriptException("CouchBase Server bad arguments");
try {
Database db = ss.getDatabase(databaseLocal);
db.compact();
}catch(Exception e){
throw new JavascriptException(e.getMessage());
}
}
/**
* Enable debug log for CBL
     * @param debug_mode boolean debug mode for development: true for VERBOSE log level, false for the default log level.
* */
@ReactMethod
public void enableLog(boolean debug_mode) {
isDebug = new Boolean(debug_mode);
}
/**
* Private functions to create couchbase server
*/
private void startServer(Integer listen_port, String userLocal, String passwordLocal) throws JavascriptException{
if(!(listen_port != null && userLocal != null && passwordLocal != null))
throw new JavascriptException("CouchBase Server bad arguments");
Manager server;
try {
Credentials allowedCredentials = new Credentials(userLocal, passwordLocal);
URLStreamHandlerFactory.registerSelfIgnoreError();
server = startCBLite();
listenPort = startCBLListener( listen_port, server, allowedCredentials);
} catch (Exception e) {
throw new JavascriptException(e.getMessage());
}
this.managerServer = server;
}
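    // Creates the Couchbase Lite Manager; when debug logging is enabled, VERBOSE
    // logging is switched on for every CBL subsystem before the Manager is built.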
private Manager startCBLite() throws IOException {
if (this.isDebug){
Manager.enableLogging(TAG, Log.VERBOSE);
Manager.enableLogging(Log.TAG, Log.VERBOSE);
Manager.enableLogging(Log.TAG_SYNC, Log.VERBOSE);
Manager.enableLogging(Log.TAG_SYNC_ASYNC_TASK, Log.VERBOSE);
Manager.enableLogging(Log.TAG_BATCHER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_QUERY, Log.VERBOSE);
Manager.enableLogging(Log.TAG_VIEW, Log.VERBOSE);
Manager.enableLogging(Log.TAG_CHANGE_TRACKER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_BLOB_STORE, Log.VERBOSE);
Manager.enableLogging(Log.TAG_DATABASE, Log.VERBOSE);
Manager.enableLogging(Log.TAG_LISTENER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_MULTI_STREAM_WRITER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_REMOTE_REQUEST, Log.VERBOSE);
Manager.enableLogging(Log.TAG_ROUTER, Log.VERBOSE);
}
return new Manager( new AndroidContext(this.context), Manager.DEFAULT_OPTIONS);
}
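    // Starts the REST listener on its own thread and returns the port it actually bound to.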
private int startCBLListener(int listenPort, Manager manager, Credentials allowedCredentials) {
LiteListener listener = new LiteListener(manager, listenPort, allowedCredentials);
int boundPort = listener.getListenPort();
Thread thread = new Thread(listener);
thread.start();
return boundPort;
}
/**
     * Function to send push/pull and other events to React Native
*/
private void sendEvent(ReactContext reactContext, String eventName, WritableMap params) {
reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
.emit(eventName, params);
}
}
| android/src/main/java/com/CouchBase/CouchBase.java | package com.upinion.CouchBase;
import android.content.Intent;
import android.content.Context;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.JavascriptException;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import com.couchbase.lite.android.AndroidContext;
import com.couchbase.lite.CouchbaseLiteException;
import com.couchbase.lite.Database;
import com.couchbase.lite.DocumentChange;
import com.couchbase.lite.Manager;
import com.couchbase.lite.replicator.Replication;
import com.couchbase.lite.listener.LiteListener;
import com.couchbase.lite.listener.LiteServlet;
import com.couchbase.lite.listener.Credentials;
import com.couchbase.lite.router.URLStreamHandlerFactory;
import com.couchbase.lite.View;
import com.couchbase.lite.javascript.JavaScriptViewCompiler;
import com.couchbase.lite.util.Log;
import com.couchbase.lite.auth.Authenticator;
import com.couchbase.lite.auth.AuthenticatorFactory;
import java.util.Map;
import java.util.HashMap;
import java.io.IOException;
import java.net.URL;
public class CouchBase extends ReactContextBaseJavaModule {
private ReactApplicationContext context;
private Manager managerServer;
private boolean initFailed = false;
private int listenPort;
protected Boolean isDebug = false;
private static final String PUSH_EVENT_KEY = "couchBasePushEvent";
private static final String PULL_EVENT_KEY = "couchBasePullEvent";
private static final String DB_EVENT_KEY = "couchBaseDBEvent";
public static final String TAG = "CouchBase";
/**
* Constructor for the Native Module
     * @param reactContext React context object to communicate with React-native
*/
public CouchBase(ReactApplicationContext reactContext) {
super(reactContext);
this.context = reactContext;
// Register the JavaScript view compiler
View.setCompiler(new JavaScriptViewCompiler());
}
/**
* Returns the name of this module in React-native (javascript)
*/
@Override
public String getName() {
return TAG;
}
/**
* Returns constants of this module in React-native to share (javascript)
*/
@Override
public Map<String, Object> getConstants() {
final Map<String, Object> constants = new HashMap<>();
constants.put("PUSH", PUSH_EVENT_KEY);
constants.put("PULL", PULL_EVENT_KEY);
constants.put("DBChanged", DB_EVENT_KEY);
return constants;
}
/**
* Function to be shared to React-native, it starts a local couchbase server
* @param listen_port Integer port to start server
* @param userLocal String user for local server
* @param passwordLocal String password for local server
* @param databaseLocal String database for local server
* @param onEnd Callback function to call when finish
*/
@ReactMethod
public void serverLocal(Integer listen_port, String userLocal, String passwordLocal, Callback onEnd) {
startServer(listen_port, userLocal, passwordLocal);
if(onEnd != null)
onEnd.invoke(this.listenPort);
}
/**
* Function to be shared to React-native, it starts a local couchbase server and syncs with remote
* @param listen_port Integer port to start server
* @param userLocal String user for local server
* @param passwordLocal String password for local server
* @param databaseLocal String database for local server
* @param remoteURL String URL to remote couchbase
* @param remoteUser String user for remote server
* @param remotePassword String password for remote server
* @param events Boolean activate the events for push and pull
* @param onEnd Callback function to call when finish
*/
@ReactMethod
public void serverLocalRemote(Integer listen_port, String userLocal, String passwordLocal, String databaseLocal,
String remoteURL, String remoteUser, String remotePassword, Boolean events,
Callback onEnd) {
startServer(listen_port, userLocal, passwordLocal);
Manager ss = this.managerServer;
if(!(databaseLocal != null && remoteURL != null && remoteUser != null && remotePassword != null))
throw new JavascriptException("CouchBase Server bad arguments");
try {
URL url = new URL(remoteURL);
Database db = ss.getDatabase(databaseLocal);
Replication push = db.createPushReplication(url);
Replication pull = db.createPullReplication(url);
pull.setContinuous(true);
push.setContinuous(true);
Authenticator basicAuthenticator = AuthenticatorFactory.createBasicAuthenticator(remoteUser, remotePassword);
pull.setAuthenticator(basicAuthenticator);
push.setAuthenticator(basicAuthenticator);
if (events) {
push.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PUSH_EVENT_KEY, eventM);
}
});
pull.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PULL_EVENT_KEY, eventM);
}
});
db.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
for (DocumentChange dc : event.getChanges()) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getName());
eventM.putString("id", dc.getDocumentId());
sendEvent(context, DB_EVENT_KEY, eventM);
}
}
});
}
push.start();
pull.start();
if (onEnd != null)
onEnd.invoke(this.listenPort);
}catch(Exception e){
throw new JavascriptException(e.getMessage());
}
}
/**
* Function to be shared to React-native, it starts already created local db syncing with remote
* @param databaseLocal String database for local server
* @param remoteURL String URL to remote couchbase
* @param remoteUser String user for remote server
* @param remotePassword String password for remote server
* @param events Boolean activate the events for push and pull
* @param onEnd Callback function to call when finish
*/
@ReactMethod
public void serverRemote(String databaseLocal, String remoteURL, String remoteUser,
String remotePassword, Boolean events, Callback onEnd) {
Manager ss = this.managerServer;
if(ss == null)
throw new JavascriptException("CouchBase local server needs to be started first");
if(!(databaseLocal != null && remoteURL != null && remoteUser != null && remotePassword != null))
throw new JavascriptException("CouchBase Server bad arguments");
try {
URL url = new URL(remoteURL);
Database db = ss.getDatabase(databaseLocal);
Replication push = db.createPushReplication(url);
Replication pull = db.createPullReplication(url);
pull.setContinuous(true);
push.setContinuous(true);
Authenticator basicAuthenticator = AuthenticatorFactory.createBasicAuthenticator(remoteUser, remotePassword);
pull.setAuthenticator(basicAuthenticator);
push.setAuthenticator(basicAuthenticator);
if (events) {
push.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PUSH_EVENT_KEY, eventM);
}
});
pull.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
sendEvent(context, PULL_EVENT_KEY, eventM);
}
});
db.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
for (DocumentChange dc : event.getChanges()) {
WritableMap eventM = Arguments.createMap();
eventM.putString("databaseName", event.getSource().getName());
eventM.putString("id", dc.getDocumentId());
sendEvent(context, DB_EVENT_KEY, eventM);
}
}
});
}
push.start();
pull.start();
if (onEnd != null)
onEnd.invoke();
}catch(Exception e){
throw new JavascriptException(e.getMessage());
}
}
/**
* Function to be shared to React-native, compacts an already created local database
* @param databaseLocal String database for local server
*/
@ReactMethod
public void compact(String databaseLocal) {
Manager ss = this.managerServer;
if(ss == null)
throw new JavascriptException("CouchBase local server needs to be started first");
if(databaseLocal == null)
throw new JavascriptException("CouchBase Server bad arguments");
try {
Database db = ss.getDatabase(databaseLocal);
db.compact();
}catch(Exception e){
throw new JavascriptException(e.getMessage());
}
}
/**
* Enable debug log for CBL
     * @param debug_mode boolean debug mode for development: true for VERBOSE log level, false for the default log level.
* */
@ReactMethod
public void enableLog(boolean debug_mode) {
isDebug = new Boolean(debug_mode);
}
/**
* Private functions to create couchbase server
*/
private void startServer(Integer listen_port, String userLocal, String passwordLocal) throws JavascriptException{
if(!(listen_port != null && userLocal != null && passwordLocal != null))
throw new JavascriptException("CouchBase Server bad arguments");
Manager server;
try {
Credentials allowedCredentials = new Credentials(userLocal, passwordLocal);
URLStreamHandlerFactory.registerSelfIgnoreError();
server = startCBLite();
listenPort = startCBLListener( listen_port, server, allowedCredentials);
} catch (Exception e) {
throw new JavascriptException(e.getMessage());
}
this.managerServer = server;
}
private Manager startCBLite() throws IOException {
if (this.isDebug){
Manager.enableLogging(TAG, Log.VERBOSE);
Manager.enableLogging(Log.TAG, Log.VERBOSE);
Manager.enableLogging(Log.TAG_SYNC, Log.VERBOSE);
Manager.enableLogging(Log.TAG_SYNC_ASYNC_TASK, Log.VERBOSE);
Manager.enableLogging(Log.TAG_BATCHER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_QUERY, Log.VERBOSE);
Manager.enableLogging(Log.TAG_VIEW, Log.VERBOSE);
Manager.enableLogging(Log.TAG_CHANGE_TRACKER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_BLOB_STORE, Log.VERBOSE);
Manager.enableLogging(Log.TAG_DATABASE, Log.VERBOSE);
Manager.enableLogging(Log.TAG_LISTENER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_MULTI_STREAM_WRITER, Log.VERBOSE);
Manager.enableLogging(Log.TAG_REMOTE_REQUEST, Log.VERBOSE);
Manager.enableLogging(Log.TAG_ROUTER, Log.VERBOSE);
}
return new Manager( new AndroidContext(this.context), Manager.DEFAULT_OPTIONS);
}
private int startCBLListener(int listenPort, Manager manager, Credentials allowedCredentials) {
LiteListener listener = new LiteListener(manager, listenPort, allowedCredentials);
int boundPort = listener.getListenPort();
Thread thread = new Thread(listener);
thread.start();
return boundPort;
}
/**
     * Function to send push/pull and other events to React Native
*/
private void sendEvent(ReactContext reactContext, String eventName, WritableMap params) {
reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
.emit(eventName, params);
}
}
| Detects auth error on Replication event | android/src/main/java/com/CouchBase/CouchBase.java | Detects auth error on Replication event | <ide><path>ndroid/src/main/java/com/CouchBase/CouchBase.java
<ide> import com.couchbase.lite.util.Log;
<ide> import com.couchbase.lite.auth.Authenticator;
<ide> import com.couchbase.lite.auth.AuthenticatorFactory;
<add>import com.couchbase.lite.replicator.RemoteRequestResponseException;
<ide>
<ide> import java.util.Map;
<ide> import java.util.HashMap;
<ide> private static final String PUSH_EVENT_KEY = "couchBasePushEvent";
<ide> private static final String PULL_EVENT_KEY = "couchBasePullEvent";
<ide> private static final String DB_EVENT_KEY = "couchBaseDBEvent";
<add> private static final String AUTH_ERROR_KEY = "couchbBaseAuthError";
<ide> public static final String TAG = "CouchBase";
<ide>
<ide> /**
<ide> constants.put("PUSH", PUSH_EVENT_KEY);
<ide> constants.put("PULL", PULL_EVENT_KEY);
<ide> constants.put("DBChanged", DB_EVENT_KEY);
<add> constants.put("AuthError", AUTH_ERROR_KEY);
<ide> return constants;
<ide> }
<ide> /**
<ide> push.addChangeListener(new Replication.ChangeListener() {
<ide> @Override
<ide> public void changed(Replication.ChangeEvent event) {
<del> WritableMap eventM = Arguments.createMap();
<del> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<del> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<del> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<del> sendEvent(context, PUSH_EVENT_KEY, eventM);
<add> if (event.getError() != null) {
<add> Throwable lastError = event.getError();
<add> if (lastError instanceof RemoteRequestResponseException) {
<add> RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
<add> if (exception.getCode() == 401) {
<add> // Authentication error
<add> WritableMap eventError = Arguments.createMap();
<add> sendEvent(context, AUTH_ERROR_KEY, eventError);
<add> }
<add> }
<add> } else {
<add> WritableMap eventM = Arguments.createMap();
<add> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<add> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<add> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<add> sendEvent(context, PUSH_EVENT_KEY, eventM);
<add> }
<ide> }
<ide> });
<ide> pull.addChangeListener(new Replication.ChangeListener() {
<ide> @Override
<ide> public void changed(Replication.ChangeEvent event) {
<del> WritableMap eventM = Arguments.createMap();
<del> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<del> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<del> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<del> sendEvent(context, PULL_EVENT_KEY, eventM);
<add> if (event.getError() != null) {
<add> Throwable lastError = event.getError();
<add> if (lastError instanceof RemoteRequestResponseException) {
<add> RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
<add> if (exception.getCode() == 401) {
<add> // Authentication error
<add> WritableMap eventError = Arguments.createMap();
<add> sendEvent(context, AUTH_ERROR_KEY, eventError);
<add> }
<add> }
<add> } else {
<add> WritableMap eventM = Arguments.createMap();
<add> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<add> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<add> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<add> sendEvent(context, PULL_EVENT_KEY, eventM);
<add> }
<ide> }
<ide> });
<ide> db.addChangeListener(new Database.ChangeListener() {
<ide> push.addChangeListener(new Replication.ChangeListener() {
<ide> @Override
<ide> public void changed(Replication.ChangeEvent event) {
<del> WritableMap eventM = Arguments.createMap();
<del> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<del> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<del> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<del> sendEvent(context, PUSH_EVENT_KEY, eventM);
<add> if (event.getError() != null) {
<add> Throwable lastError = event.getError();
<add> if (lastError instanceof RemoteRequestResponseException) {
<add> RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
<add> if (exception.getCode() == 401) {
<add> // Authentication error
<add> WritableMap eventError = Arguments.createMap();
<add> sendEvent(context, AUTH_ERROR_KEY, eventError);
<add> }
<add> }
<add> } else {
<add> WritableMap eventM = Arguments.createMap();
<add> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<add> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<add> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<add> sendEvent(context, PUSH_EVENT_KEY, eventM);
<add> }
<ide> }
<ide> });
<ide> pull.addChangeListener(new Replication.ChangeListener() {
<ide> @Override
<ide> public void changed(Replication.ChangeEvent event) {
<del> WritableMap eventM = Arguments.createMap();
<del> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<del> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<del> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<del> sendEvent(context, PULL_EVENT_KEY, eventM);
<add> if (event.getError() != null) {
<add> Throwable lastError = event.getError();
<add> if (lastError instanceof RemoteRequestResponseException) {
<add> RemoteRequestResponseException exception = (RemoteRequestResponseException) lastError;
<add> if (exception.getCode() == 401) {
<add> // Authentication error
<add> WritableMap eventError = Arguments.createMap();
<add> sendEvent(context, AUTH_ERROR_KEY, eventError);
<add> }
<add> }
<add> } else {
<add> WritableMap eventM = Arguments.createMap();
<add> eventM.putString("databaseName", event.getSource().getLocalDatabase().getName());
<add> eventM.putString("changesCount", String.valueOf(event.getSource().getCompletedChangesCount()));
<add> eventM.putString("totalChanges", String.valueOf(event.getSource().getChangesCount()));
<add> sendEvent(context, PULL_EVENT_KEY, eventM);
<add> }
<ide> }
<ide> });
<ide> db.addChangeListener(new Database.ChangeListener() { |
|
JavaScript | mit | 6e1a9137d852378ad825e45f340b47a064988d44 | 0 | axelpale/tresdb,axelpale/tresdb | /* eslint-disable new-cap */
var local = require('../../../config/local');
var handlers = require('./handlers');
var jsonParser = require('body-parser').json();
// Token middleware. User can access the routes only with valid token.
// Token contents are stored in req.user.
// See https://github.com/auth0/express-jwt
var jwt = require('express-jwt');
var jwtParser = jwt({
secret: local.secret,
algorithms: ['HS256'],
});
var router = require('express').Router();
// Authentication
router.post('/', jsonParser, handlers.login);
// Password reset
router.post('/reset/email', jsonParser, handlers.sendResetPasswordEmail);
router.post('/reset', jwtParser, jsonParser, handlers.resetPassword);
// Change password
router.post('/password', jwtParser, jsonParser, handlers.changePassword);
// Invitation & post-invite sign up
router.post('/invite', jwtParser, jsonParser, handlers.sendInviteEmail);
router.post('/signup', jwtParser, jsonParser, handlers.signup);
module.exports = router;
| server/api/account/routes.js | /* eslint-disable new-cap */
var local = require('../../../config/local');
var handlers = require('./handlers');
var jsonParser = require('body-parser').json();
// Token middleware. User can access the routes only with valid token.
// Token contents are stored in req.user.
// See https://github.com/auth0/express-jwt
var jwt = require('express-jwt');
var jwtParser = jwt({ secret: local.secret, algorithms: ['HS256'] });
var router = require('express').Router();
// Authentication
router.post('/', jsonParser, handlers.login);
// Password reset
router.post('/reset/email', jsonParser, handlers.sendResetPasswordEmail);
router.post('/reset', jwtParser, jsonParser, handlers.resetPassword);
// Change password
router.post('/password', jwtParser, jsonParser, handlers.changePassword);
// Invitation & post-invite sign up
router.post('/invite', jwtParser, jsonParser, handlers.sendInviteEmail);
router.post('/signup', jwtParser, jsonParser, handlers.signup);
module.exports = router;
| Fix lint issues
| server/api/account/routes.js | Fix lint issues | <ide><path>erver/api/account/routes.js
<ide> // Token contents are stored in req.user.
<ide> // See https://github.com/auth0/express-jwt
<ide> var jwt = require('express-jwt');
<del>var jwtParser = jwt({ secret: local.secret, algorithms: ['HS256'] });
<add>var jwtParser = jwt({
<add> secret: local.secret,
<add> algorithms: ['HS256'],
<add>});
<ide>
<ide> var router = require('express').Router();
<ide> |
|
Java | agpl-3.0 | 6d88a7b6fd6be378d0ed545703cdca9e278e07d5 | 0 | berthoug/hopsworks,ErmiasG/hopsworks,FilotasSiskos/hopsworks,ErmiasG/hopsworks,AlexHopsworks/hopsworks,berthoug/hopsworks,AlexHopsworks/hopsworks,AlexHopsworks/hopsworks,berthoug/hopsworks,FilotasSiskos/hopsworks,berthoug/hopsworks,ErmiasG/hopsworks,FilotasSiskos/hopsworks,FilotasSiskos/hopsworks,berthoug/hopsworks,AlexHopsworks/hopsworks,ErmiasG/hopsworks,berthoug/hopsworks,FilotasSiskos/hopsworks,ErmiasG/hopsworks,FilotasSiskos/hopsworks,ErmiasG/hopsworks,AlexHopsworks/hopsworks,AlexHopsworks/hopsworks | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package se.kth.bbc.security.audit;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import se.kth.bbc.activity.Activity;
import se.kth.bbc.activity.ActivityController;
import se.kth.bbc.activity.ActivityDetail;
import se.kth.bbc.activity.ActivityFacade;
import se.kth.bbc.lims.MessagesController;
import se.kth.bbc.security.audit.model.AccountAudit;
import se.kth.bbc.security.audit.model.RolesAudit;
import se.kth.bbc.security.audit.model.Userlogins;
import se.kth.bbc.security.ua.UserManager;
import se.kth.hopsworks.user.model.Users;
@ManagedBean
@ViewScoped
public class AuditTrails implements Serializable {
private static final long serialVersionUID = 1L;
@EJB
private UserManager userManager;
@EJB
private AuditManager auditManager;
@EJB
private ActivityFacade activityController;
private String username;
private Date from;
private Date to;
private AccountsAuditActions selectedAccountsAuditAction;
private RolesAuditActions selectdeRolesAuditAction;
private StudyAuditActions selectedStudyAuditAction;
private UserAuditActions selectedLoginsAuditAction;
private List<Userlogins> userLogins;
private List<RolesAudit> roleAudit;
private List<AccountAudit> accountAudit;
private List<Activity> ad;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public Date getFrom() {
return from;
}
public void setFrom(Date from) {
this.from = from;
}
public Date getTo() {
return to;
}
public void setTo(Date to) {
this.to = to;
}
public RolesAuditActions[] getAuditActions() {
return RolesAuditActions.values();
}
public List<Userlogins> getUserLogins() {
return userLogins;
}
public void setUserLogins(List<Userlogins> userLogins) {
this.userLogins = userLogins;
}
public List<RolesAudit> getRoleAudit() {
return roleAudit;
}
public void setRoleAudit(List<RolesAudit> roleAudit) {
this.roleAudit = roleAudit;
}
public List<AccountAudit> getAccountAudit() {
return accountAudit;
}
public void setAccountAudit(List<AccountAudit> accountAudit) {
this.accountAudit = accountAudit;
}
public AccountsAuditActions[] getAccountsAuditActions() {
return AccountsAuditActions.values();
}
public RolesAuditActions[] getRolesAuditActions() {
return RolesAuditActions.values();
}
public UserAuditActions[] getLoginsAuditActions() {
return UserAuditActions.values();
}
public StudyAuditActions[] getStudyAuditActions() {
return StudyAuditActions.values();
}
public AccountsAuditActions getSelectedAccountsAuditAction() {
return selectedAccountsAuditAction;
}
public void setSelectedAccountsAuditAction(
AccountsAuditActions selectedAccountsAuditAction) {
this.selectedAccountsAuditAction = selectedAccountsAuditAction;
}
public RolesAuditActions getSelectdeRolesAuditAction() {
return selectdeRolesAuditAction;
}
public void setSelectdeRolesAuditAction(
RolesAuditActions selectdeRolesAuditAction) {
this.selectdeRolesAuditAction = selectdeRolesAuditAction;
}
public StudyAuditActions getSelectedStudyAuditAction() {
return selectedStudyAuditAction;
}
public void setSelectedStudyAuditAction(
StudyAuditActions selectedStudyAuditAction) {
this.selectedStudyAuditAction = selectedStudyAuditAction;
}
public UserAuditActions getSelectedLoginsAuditAction() {
return selectedLoginsAuditAction;
}
public void setSelectedLoginsAuditAction(
UserAuditActions selectedLoginsAuditAction) {
this.selectedLoginsAuditAction = selectedLoginsAuditAction;
}
public List<Activity> getAd() {
return ad;
}
public void setAd(List<Activity> ad) {
this.ad = ad;
}
/**
* Generate audit report for account modifications.
* <p>
* @param username
* @param from
* @param to
* @param action
* @return
*/
public List<AccountAudit> getAccoutnAudit(String username, Date from, Date to,
String action) {
Users u = userManager.getUserByEmail(username);
if (u == null) {
return auditManager.getAccountAudit(convertTosqlDate(from),
convertTosqlDate(to), action);
} else {
return auditManager.getAccountAudit(u.getUid(), convertTosqlDate(from),
convertTosqlDate(to), action);
}
}
/**
*
* @param studyName
* @param from
* @param to
* @return
*/
public List<Activity> getStudyAudit(String studyName, Date from, Date to) {
return activityController.activityDetailOnStudyAudit(studyName,
convertTosqlDate(from), convertTosqlDate(to));
}
/**
* Generate audit report for role entitlement.
* <p>
* @param username
* @param from
* @param to
* @param action
* @return
*/
public List<RolesAudit> getRoleAudit(String username, Date from, Date to,
String action) {
Users u = userManager.getUserByEmail(username);
if (u == null) {
return auditManager.getRoletAudit(convertTosqlDate(from),
convertTosqlDate(to), action);
} else {
return auditManager.getRoletAudit(u.getUid(), convertTosqlDate(from),
convertTosqlDate(to), action);
}
}
/**
*
* @param username
* @param from
* @param to
* @param action
* @return
*/
public List<Userlogins> getUserLogins(String username, Date from, Date to,
String action) {
Users u = userManager.getUserByEmail(username);
if (u == null) {
return auditManager.getUsersLoginsFromTo(convertTosqlDate(from),
convertTosqlDate(to), action);
} else {
return auditManager.
getUserLoginsFromTo(u.getUid(), convertTosqlDate(from),
convertTosqlDate(to), action);
}
}
/**
* Dispatch the audit events and get the relevant audit trails.
* <p>
* @param action
*/
public void processLoginAuditRequest(UserAuditActions action) {
if (action.getValue().equals(UserAuditActions.REGISTRATION.getValue())) {
userLogins = getUserLogins(username, from, to, action.getValue());
} else if (action.getValue().equals(UserAuditActions.LOGIN.
getValue()) || action.getValue().equals(UserAuditActions.LOGOUT.
getValue())) {
userLogins = getUserLogins(username, from, to, action.getValue());
} else if (action.getValue().equals(UserAuditActions.SUCCESS.
getValue()) || action.getValue().equals(UserAuditActions.FAILED.
getValue())) {
userLogins = getUserLogins(username, from, to, action.getValue());
} else if (action.getValue().equals(UserAuditActions.QRCODE.
getValue()) || action.getValue().equals(UserAuditActions.RECOVERY.
getValue())) {
userLogins = getUserLogins(username, from, to, action.getValue());
} else if(action.getValue().equals(UserAuditActions.ALL.getValue())){
userLogins = getUserLogins(username, from, to, action.getValue());
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Dispatch the audit events and get the relevant audit trails.
* <p>
* @param action
*/
public void processAccountAuditRequest(AccountsAuditActions action) {
if (action.getValue().equals(AccountsAuditActions.PASSWORDCHANGE.getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.LOSTDEVICE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.PROFILEUPDATE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.SECQUESTIONCHANGE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.PROFILEUPDATE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.USERMANAGEMENT.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.USERMANAGEMENT.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Generate audit report for role entitlement.
* <p>
* @param action
*/
public void processRoleAuditRequest(RolesAuditActions action) {
if (action.getValue().equals(RolesAuditActions.ADDROLE.getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(RolesAuditActions.REMOVEROLE.getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(RolesAuditActions.ALLROLEASSIGNMENTS.
getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
    } else if (action.getValue().equals(RolesAuditActions.SUCCESS.getValue()) || action.getValue().equals(RolesAuditActions.FAILED.getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Generate audit report for studies.
* <p>
* @param action
*/
public void processStudyAuditRequest(StudyAuditActions action) {
if (action.getValue().equals(StudyAuditActions.AUDITTRAILS.getValue())) {
ad = activityController.activityDetailOnStudyAudit(username,
convertTosqlDate(from), convertTosqlDate(to));
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Convert the GUI date to SQL format.
* <p>
* @param calendarDate
* @return
*/
public java.sql.Date convertTosqlDate(java.util.Date calendarDate) {
return new java.sql.Date(calendarDate.getTime());
}
}
| src/main/java/se/kth/bbc/security/audit/AuditTrails.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package se.kth.bbc.security.audit;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import javax.ejb.EJB;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import se.kth.bbc.activity.Activity;
import se.kth.bbc.activity.ActivityController;
import se.kth.bbc.activity.ActivityDetail;
import se.kth.bbc.activity.ActivityFacade;
import se.kth.bbc.lims.MessagesController;
import se.kth.bbc.security.audit.model.AccountAudit;
import se.kth.bbc.security.audit.model.RolesAudit;
import se.kth.bbc.security.audit.model.Userlogins;
import se.kth.bbc.security.ua.UserManager;
import se.kth.hopsworks.user.model.Users;
@ManagedBean
@ViewScoped
public class AuditTrails implements Serializable {
private static final long serialVersionUID = 1L;
@EJB
private UserManager userManager;
@EJB
private AuditManager auditManager;
@EJB
private ActivityFacade activityController;
private String username;
private Date from;
private Date to;
private AccountsAuditActions selectedAccountsAuditAction;
private RolesAuditActions selectdeRolesAuditAction;
private StudyAuditActions selectedStudyAuditAction;
private UserAuditActions selectedLoginsAuditAction;
private List<Userlogins> userLogins;
private List<RolesAudit> roleAudit;
private List<AccountAudit> accountAudit;
private List<Activity> ad;
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public Date getFrom() {
return from;
}
public void setFrom(Date from) {
this.from = from;
}
public Date getTo() {
return to;
}
public void setTo(Date to) {
this.to = to;
}
public RolesAuditActions[] getAuditActions() {
return RolesAuditActions.values();
}
public List<Userlogins> getUserLogins() {
return userLogins;
}
public void setUserLogins(List<Userlogins> userLogins) {
this.userLogins = userLogins;
}
public List<RolesAudit> getRoleAudit() {
return roleAudit;
}
public void setRoleAudit(List<RolesAudit> roleAudit) {
this.roleAudit = roleAudit;
}
public List<AccountAudit> getAccountAudit() {
return accountAudit;
}
public void setAccountAudit(List<AccountAudit> accountAudit) {
this.accountAudit = accountAudit;
}
public AccountsAuditActions[] getAccountsAuditActions() {
return AccountsAuditActions.values();
}
public RolesAuditActions[] getRolesAuditActions() {
return RolesAuditActions.values();
}
public UserAuditActions[] getLoginsAuditActions() {
return UserAuditActions.values();
}
public StudyAuditActions[] getStudyAuditActions() {
return StudyAuditActions.values();
}
public AccountsAuditActions getSelectedAccountsAuditAction() {
return selectedAccountsAuditAction;
}
public void setSelectedAccountsAuditAction(
AccountsAuditActions selectedAccountsAuditAction) {
this.selectedAccountsAuditAction = selectedAccountsAuditAction;
}
public RolesAuditActions getSelectdeRolesAuditAction() {
return selectdeRolesAuditAction;
}
public void setSelectdeRolesAuditAction(
RolesAuditActions selectdeRolesAuditAction) {
this.selectdeRolesAuditAction = selectdeRolesAuditAction;
}
public StudyAuditActions getSelectedStudyAuditAction() {
return selectedStudyAuditAction;
}
public void setSelectedStudyAuditAction(
StudyAuditActions selectedStudyAuditAction) {
this.selectedStudyAuditAction = selectedStudyAuditAction;
}
public UserAuditActions getSelectedLoginsAuditAction() {
return selectedLoginsAuditAction;
}
public void setSelectedLoginsAuditAction(
UserAuditActions selectedLoginsAuditAction) {
this.selectedLoginsAuditAction = selectedLoginsAuditAction;
}
public List<Activity> getAd() {
return ad;
}
public void setAd(List<Activity> ad) {
this.ad = ad;
}
/**
* Generate audit report for account modifications.
* <p>
* @param username
* @param from
* @param to
* @param action
* @return
*/
public List<AccountAudit> getAccoutnAudit(String username, Date from, Date to,
String action) {
Users u = userManager.getUserByEmail(username);
if (u == null) {
return auditManager.getAccountAudit(convertTosqlDate(from),
convertTosqlDate(to), action);
} else {
return auditManager.getAccountAudit(u.getUid(), convertTosqlDate(from),
convertTosqlDate(to), action);
}
}
/**
*
* @param studyName
* @param from
* @param to
* @return
*/
public List<Activity> getStudyAudit(String studyName, Date from, Date to) {
return activityController.activityDetailOnStudyAudit(studyName,
convertTosqlDate(from), convertTosqlDate(to));
}
/**
* Generate audit report for role entitlement.
* <p>
* @param username
* @param from
* @param to
* @param action
* @return
*/
public List<RolesAudit> getRoleAudit(String username, Date from, Date to,
String action) {
Users u = userManager.getUserByEmail(username);
if (u == null) {
return auditManager.getRoletAudit(convertTosqlDate(from),
convertTosqlDate(to), action);
} else {
return auditManager.getRoletAudit(u.getUid(), convertTosqlDate(from),
convertTosqlDate(to), action);
}
}
/**
*
* @param username
* @param from
* @param to
* @param action
* @return
*/
public List<Userlogins> getUserLogins(String username, Date from, Date to,
String action) {
Users u = userManager.getUserByEmail(username);
if (u == null) {
return auditManager.getUsersLoginsFromTo(convertTosqlDate(from),
convertTosqlDate(to), action);
} else {
return auditManager.
getUserLoginsFromTo(u.getUid(), convertTosqlDate(from),
convertTosqlDate(to), action);
}
}
/**
* Dispatch the audit events and get the relevant audit trails.
* <p>
* @param action
*/
public void processLoginAuditRequest(UserAuditActions action) {
if (action.getValue().equals(UserAuditActions.REGISTRATION.getValue())) {
userLogins = getUserLogins(username, from, to, action.getValue());
} else if (action.getValue().equals(UserAuditActions.LOGIN.
getValue()) || action.getValue().equals(UserAuditActions.LOGOUT.
getValue())) {
userLogins = getUserLogins(username, from, to, action.getValue());
}else if(action.getValue().equals(UserAuditActions.ALL.getValue())){
userLogins = getUserLogins(username, from, to, action.getValue());
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Dispatch the audit events and get the relevant audit trails.
* <p>
* @param action
*/
public void processAccountAuditRequest(AccountsAuditActions action) {
if (action.getValue().equals(AccountsAuditActions.PASSWORDCHANGE.getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.LOSTDEVICE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.PROFILEUPDATE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.SECQUESTIONCHANGE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.PROFILEUPDATE.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(AccountsAuditActions.USERMANAGEMENT.
getValue())) {
accountAudit = getAccoutnAudit(username, from, to, action.getValue());
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Generate audit report for role entitlement.
* <p>
* @param action
*/
public void processRoleAuditRequest(RolesAuditActions action) {
if (action.getValue().equals(RolesAuditActions.ADDROLE.getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(RolesAuditActions.REMOVEROLE.getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
} else if (action.getValue().equals(RolesAuditActions.ALLROLEASSIGNMENTS.
getValue())) {
roleAudit = getRoleAudit(username, from, to, action.getValue());
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Generate audit report for studies.
* <p>
* @param action
*/
public void processStudyAuditRequest(StudyAuditActions action) {
if (action.getValue().equals(StudyAuditActions.AUDITTRAILS.getValue())) {
ad = activityController.activityDetailOnStudyAudit(username,
convertTosqlDate(from), convertTosqlDate(to));
} else {
MessagesController.addSecurityErrorMessage("Audit action not supported.");
}
}
/**
* Convert the GUI date to SQL format.
* <p>
* @param calendarDate
* @return
*/
public java.sql.Date convertTosqlDate(java.util.Date calendarDate) {
return new java.sql.Date(calendarDate.getTime());
}
}
| Added new options for audit reports
| src/main/java/se/kth/bbc/security/audit/AuditTrails.java | Added new options for audit reports | <ide><path>rc/main/java/se/kth/bbc/security/audit/AuditTrails.java
<ide> getValue()) || action.getValue().equals(UserAuditActions.LOGOUT.
<ide> getValue())) {
<ide> userLogins = getUserLogins(username, from, to, action.getValue());
<del> }else if(action.getValue().equals(UserAuditActions.ALL.getValue())){
<add> } else if (action.getValue().equals(UserAuditActions.SUCCESS.
<add> getValue()) || action.getValue().equals(UserAuditActions.FAILED.
<add> getValue())) {
<add> userLogins = getUserLogins(username, from, to, action.getValue());
<add> } else if (action.getValue().equals(UserAuditActions.QRCODE.
<add> getValue()) || action.getValue().equals(UserAuditActions.RECOVERY.
<add> getValue())) {
<add> userLogins = getUserLogins(username, from, to, action.getValue());
<add> } else if(action.getValue().equals(UserAuditActions.ALL.getValue())){
<ide> userLogins = getUserLogins(username, from, to, action.getValue());
<ide> } else {
<ide> MessagesController.addSecurityErrorMessage("Audit action not supported.");
<ide> getValue())) {
<ide> accountAudit = getAccoutnAudit(username, from, to, action.getValue());
<ide> } else if (action.getValue().equals(AccountsAuditActions.PROFILEUPDATE.
<add> getValue())) {
<add> accountAudit = getAccoutnAudit(username, from, to, action.getValue());
<add> } else if (action.getValue().equals(AccountsAuditActions.USERMANAGEMENT.
<ide> getValue())) {
<ide> accountAudit = getAccoutnAudit(username, from, to, action.getValue());
<ide> } else if (action.getValue().equals(AccountsAuditActions.USERMANAGEMENT.
<ide> } else if (action.getValue().equals(RolesAuditActions.ALLROLEASSIGNMENTS.
<ide> getValue())) {
<ide> roleAudit = getRoleAudit(username, from, to, action.getValue());
<add>    } else if (action.getValue().equals(RolesAuditActions.SUCCESS.getValue()) || action.getValue().equals(RolesAuditActions.FAILED.getValue())) {
<add> roleAudit = getRoleAudit(username, from, to, action.getValue());
<ide> } else {
<ide> MessagesController.addSecurityErrorMessage("Audit action not supported.");
<ide> } |
|
JavaScript | mit | 58239cba2ca6561c10297b48fc4d6d5eaac82563 | 0 | danielbh/danielhollcraft.com,danielbh/danielhollcraft.com-gatsbyjs,danielbh/danielhollcraft.com,danielbh/danielhollcraft.com | import React from "react"
let stylesStr
if (process.env.NODE_ENV === `production`) {
try {
stylesStr = require(`!raw-loader!../public/styles.css`)
} catch (e) {
console.log(e)
}
}
module.exports = class HTML extends React.Component {
render() {
let css
if (process.env.NODE_ENV === `production`) {
css = (
<style
id="gatsby-inlined-css"
dangerouslySetInnerHTML={{ __html: stylesStr }}
/>
)
}
return (
<html {...this.props.htmlAttributes}>
<head>
<meta charSet="utf-8" />
<meta httpEquiv="x-ua-compatible" content="ie=edge" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, shrink-to-fit=no"
/>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" />
{this.props.headComponents}
{css}
</head>
<body {...this.props.bodyAttributes}>
{this.props.preBodyComponents}
<div
key={`body`}
id="___gatsby"
dangerouslySetInnerHTML={{ __html: this.props.body }}
/>
{this.props.postBodyComponents}
</body>
</html>
)
}
}
| ui/src/html.js | import React from "react"
let stylesStr
if (process.env.NODE_ENV === `production`) {
try {
stylesStr = require(`!raw-loader!../public/styles.css`)
} catch (e) {
console.log(e)
}
}
module.exports = class HTML extends React.Component {
render() {
let css
if (process.env.NODE_ENV === `production`) {
css = (
<style
id="gatsby-inlined-css"
dangerouslySetInnerHTML={{ __html: stylesStr }}
/>
)
}
return (
<html {...this.props.htmlAttributes}>
<head>
<meta charSet="utf-8" />
<meta httpEquiv="x-ua-compatible" content="ie=edge" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, shrink-to-fit=no"
/>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" />
{this.props.headComponents}
{css}
<script async>(function(s,u,m,o,j,v){j = u.createElement(m); v=u.getElementsByTagName(m)[0];j.async=1;j.src=o;j.dataset.sumoSiteId='d72f256005f80506fdecd25e36de7177abb7aee5891029af111bfd798f21a62b';v.parentNode.insertBefore(j,v)})(window,document,'script','//load.sumo.com/');</script>
</head>
<body {...this.props.bodyAttributes}>
{this.props.preBodyComponents}
<div
key={`body`}
id="___gatsby"
dangerouslySetInnerHTML={{ __html: this.props.body }}
/>
{this.props.postBodyComponents}
</body>
</html>
)
}
}
| remove sumo code
| ui/src/html.js | remove sumo code | <ide><path>i/src/html.js
<ide> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" />
<ide> {this.props.headComponents}
<ide> {css}
<del> <script async>(function(s,u,m,o,j,v){j = u.createElement(m); v=u.getElementsByTagName(m)[0];j.async=1;j.src=o;j.dataset.sumoSiteId='d72f256005f80506fdecd25e36de7177abb7aee5891029af111bfd798f21a62b';v.parentNode.insertBefore(j,v)})(window,document,'script','//load.sumo.com/');</script>
<ide> </head>
<ide> <body {...this.props.bodyAttributes}>
<ide> {this.props.preBodyComponents} |
|
JavaScript | bsd-3-clause | 455e4115e4b1a0b3579b9d888af44c5cde2a426b | 0 | SCAII/SCAII,SCAII/SCAII,SCAII/SCAII,SCAII/SCAII,SCAII/SCAII |
goog.require('proto.scaii.common.Action');
goog.require('proto.scaii.common.AgentCfg');
goog.require('proto.scaii.common.AgentEndpoint');
goog.require('proto.scaii.common.AgentSupported');
goog.require('proto.scaii.common.BackendCfg');
goog.require('proto.scaii.common.BackendEndpoint');
goog.require('proto.scaii.common.BackendInit');
goog.require('proto.scaii.common.BackendSupported');
goog.require('proto.scaii.common.BackendSupported.SerializationSupport');
goog.require('proto.scaii.common.Cfg');
goog.require('proto.scaii.common.Color');
goog.require('proto.scaii.common.CoreCfg');
goog.require('proto.scaii.common.CoreEndpoint');
goog.require('proto.scaii.common.Endpoint');
goog.require('proto.scaii.common.Entity');
goog.require('proto.scaii.common.Error');
goog.require('proto.scaii.common.ExplanationPoint');
goog.require('proto.scaii.common.InitAs');
goog.require('proto.scaii.common.ModuleCfg');
goog.require('proto.scaii.common.ModuleEndpoint');
goog.require('proto.scaii.common.ModuleInit');
goog.require('proto.scaii.common.ModuleSupported');
goog.require('proto.scaii.common.MultiMessage');
goog.require('proto.scaii.common.Other');
goog.require('proto.scaii.common.PluginType');
goog.require('proto.scaii.common.Pos');
goog.require('proto.scaii.common.Rect');
goog.require('proto.scaii.common.RustFFIConfig');
goog.require('proto.scaii.common.ScaiiPacket');
goog.require('proto.scaii.common.SerializationFormat');
goog.require('proto.scaii.common.SerializationRequest');
goog.require('proto.scaii.common.SerializationResponse');
goog.require('proto.scaii.common.Shape');
goog.require('proto.scaii.common.State');
goog.require('proto.scaii.common.SupportedBehavior');
goog.require('proto.scaii.common.Triangle');
goog.require('proto.scaii.common.Viz');
goog.require('proto.scaii.common.VizInit');
/**
* Copyright (c) 2017-present, Oregon State University, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
var userInputBlocked = false;
var systemAcronym = "SCAII";
var systemTitle = "Small Configurable AI Interface";
// VizInit defaults
var testingMode = false;
var maxStep = 0;
var explanations = [];
var userCommandScaiiPackets = [];
var sessionState = "pending";
var currentStep = -1;
var gameScaleFactor = 5;
var spacingFactor = 1;
var sizingFactor = 1;
var zoomFactor = 3;
var zoomBoxOriginX = 0;
var zoomBoxOriginY = 0;
var entitiesList = undefined;
var shapePositionMapForContext = {};
var primaryHighlightedShapeIds = [];
var secondaryHighlightedShapeIds = [];
var explanationBoxMap = {};
var game_background_color = "#123456";
var explanationControlYPosition = 14;
// Create the gameboard canvas
var gameboard_canvas = document.createElement("canvas");
var gameboard_ctx = gameboard_canvas.getContext("2d");
var gameboard_zoom_canvas = document.createElement("canvas");
var gameboard_zoom_ctx = gameboard_zoom_canvas.getContext("2d");
var expl_ctrl_canvas = document.createElement("canvas");
var expl_ctrl_ctx = expl_ctrl_canvas.getContext("2d");
var replaySessionConfig;
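// Clicking the explanation control bar selects the nearest explanation step, stages an
// EXPLAIN user command for that step, and redraws the bar with the step marked as selected.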
expl_ctrl_canvas.addEventListener('click', function (event) {
    var matchingStep = getMatchingExplanationStep(expl_ctrl_ctx, event.offsetX, event.offsetY);
reflectSelectedStep(matchingStep);
console.log('clicked on step ' + matchingStep);
if (matchingStep != undefined) {
var userCommand = new proto.scaii.common.UserCommand;
userCommand.setCommandType(proto.scaii.common.UserCommand.UserCommandType.EXPLAIN);
var args = ['' +matchingStep];
userCommand.setArgsList(args);
stageUserCommand(userCommand);
handleReplaySessionConfig(replaySessionConfig,matchingStep);
// var targetStepString = '' + matchingStep;
// var args = [targetStepString];
// var userCommand = new proto.scaii.common.UserCommand;
// userCommand.setCommandType(proto.scaii.common.UserCommand.UserCommandType.JUMP_TO_STEP);
// userCommand.setArgsList(args);
// stageUserCommand(userCommand);
}
});
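// On the main gameboard a shift-click re-centers the zoom box, while a plain click highlights
// the closest in-range shape (or clears the current highlight when nothing is in range).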
gameboard_canvas.addEventListener('click', function (event) {
if (event.shiftKey) {
adjustZoomBoxPosition(event.offsetX, event.offsetY);
handleEntities(entitiesList);
}
else {
        var shapeId = getClosestInRangeShapeId(gameboard_ctx, event.offsetX, event.offsetY, shapePositionMapForContext["game"]);
primaryHighlightedShapeIds = [];
if (shapeId != undefined) {
primaryHighlightedShapeIds.push(shapeId);
}
handleEntities(entitiesList);
}
});
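// Clicks on the zoom canvas resolve the target shape through the zoom-context position map.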
gameboard_zoom_canvas.addEventListener('click', function (event) {
    var shapeId = getClosestInRangeShapeId(gameboard_zoom_ctx, event.offsetX, event.offsetY, shapePositionMapForContext["zoom"]);
primaryHighlightedShapeIds = [];
if (shapeId != undefined) {
primaryHighlightedShapeIds.push(shapeId);
}
handleEntities(entitiesList);
});
var gameboardWidth;
var gameboardHeight;
var timeline_canvas = document.createElement("canvas");
var timeline_ctx = timeline_canvas.getContext("2d");
var pauseResumeButton = document.createElement("BUTTON");
var rewindButton = document.createElement("BUTTON");
var speedSlider = document.createElement("input");
var zoomSlider = document.createElement("input");
rewindButton.disabled = true;
rewindButton.setAttribute("id", "rewindButton");
pauseResumeButton.disabled = true;
pauseResumeButton.setAttribute("id", "pauseResumeButton");
var controlsManager = configureControlsManager(pauseResumeButton, rewindButton);
var shape_outline_color = '#202020';
var shape_outline_width = 2;
var use_shape_color_for_outline = false;
var dealer;
var masterEntities = {};
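// Re-centers the zoom box on the clicked point, clamping it so it stays inside the gameboard.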
function adjustZoomBoxPosition(x, y) {
// they clicked at new target for center of box.
var boxWidth = gameboard_canvas.width / zoomFactor;
var boxHeight = gameboard_canvas.height / zoomFactor;
zoomBoxOriginX = x - boxWidth / 2;
zoomBoxOriginY = y - boxHeight / 2;
if (zoomBoxOriginX < 0) {
zoomBoxOriginX = 0;
}
else if (zoomBoxOriginX > gameboard_canvas.width - boxWidth) {
zoomBoxOriginX = gameboard_canvas.width - boxWidth;
}
else {
// a-ok - they clicked in the middle somewhere
}
if (zoomBoxOriginY < 0) {
zoomBoxOriginY = 0;
}
else if (zoomBoxOriginY > gameboard_canvas.height - boxHeight) {
zoomBoxOriginY = gameboard_canvas.height - boxHeight;
}
else {
// a-ok - they clicked in the middle somewhere
}
}
function updateButtonsAsPerCurrentStep() {
if (currentStep == 0) {
controlsManager.expressResumeButton();
controlsManager.enablePauseResume();
controlsManager.disableRewind();
}
else if (currentStep == 1) {
controlsManager.expressResumeButton();
controlsManager.enablePauseResume();
controlsManager.disableRewind();
}
else if (currentStep == maxStep) {
controlsManager.expressPauseButton();
controlsManager.disablePauseResume();
controlsManager.enableRewind();
}
else {
controlsManager.expressPauseButton();
controlsManager.enablePauseResume();
controlsManager.enableRewind();
}
}
function handleReplayControl(replayControl) {
var command = replayControl.getCommandList();
if (command.length == 2) {
if (command[0] == 'set_step_position') {
currentStep = parseInt(command[1]);
console.log('replay control set step_position to ' + currentStep);
updateProgress(currentStep, maxStep);
updateButtonsAsPerCurrentStep();
}
}
}
function handleReplaySessionConfig(rsc, selectedStep) {
explanationBoxMap = {};
if (rsc.hasStepCount()) {
maxStep = rsc.getStepCount() - 1;
}
var explanation_steps = rsc.getExplanationStepsList();
var explanation_titles = rsc.getExplanationTitlesList();
console.log("explanation count is " + explanation_steps.length);
var expl_count = explanation_steps.length;
var index = 0;
while (index < expl_count){
var step = explanation_steps[index];
var selected = false;
if (selectedStep == step){
selected = true;
}
var title = explanation_titles[index];
configureExplanation(rsc.getStepCount(), step, title, selected);
index = index + 1;
}
}
function handleExplDetails(explDetails){
console.log('handling expl details');
if (explDetails.hasExplPoint()){
explanationPoint = explDetails.getExplPoint();
console.log('got expl point for step ' + explanationPoint.getStep());
renderExplanationPoint(explanationPoint);
}
else {
console.log("MISSING expl point!");
}
}
function handleVizInit(vizInit) {
clearGameBoards();
//gameboard_ctx.fillText("Received VizInit!", 10, 50);
if (vizInit.hasTestMode()) {
if (vizInit.getTestMode()) {
testingMode = true;
}
}
// if (vizInit.hasGameboardWidth()) {
// gameboardWidth = vizInit.getGameboardWidth();
console.log("gameboard width : " + gameboardWidth);
// $("#scaii-gameboard").css("width", gameboardWidth);
// gameboard_canvas.width = gameboardWidth;
// }
// gameboard_zoom_canvas.width = gameboard_canvas.width;
// if (vizInit.hasGameboardHeight()) {
// gameboardHeight = vizInit.getGameboardHeight();
// $("#scaii-gameboard").css("height", gameboardHeight);
// gameboard_canvas.height = gameboardHeight;
// }
// gameboard_zoom_canvas.height = gameboard_canvas.height;
//renderTimeline(maxStep);
}
function handleViz(vizData) {
console.log('received Viz...');
entitiesList = vizData.getEntitiesList();
handleEntities(entitiesList);
//if (vizData.hasChart()) {
// var chartInfo = vizData.getChart();
// renderChartInfo(chartInfo, gameboardHeight);
//}
currentStep = currentStep + 1;
updateButtonsAsPerCurrentStep();
if (currentStep == maxStep) {
controlsManager.reachedEndOfGame();
}
console.log("current_step is " + currentStep + "maxStep is " + maxStep);
updateProgress(currentStep, maxStep);
}
function handleEntities(entitiesList) {
//console.log('entities count :' + entitiesList.length);
for (var i in entitiesList) {
var entity = entitiesList[i];
if (entity.hasId()) {
var idString = '' + entity.getId();
//if (idString == '8') {
// console.log('=========== UPDATING ENTITY ===================')
// logEntity(entity);
//}
//console.log('############## id string read as ' + idString + '###############');
if (masterEntities[idString] == undefined) {
if (entity.hasDelete() && entity.getDelete()) {
// do not add new entity that is marked as delete
}
else {
masterEntities[idString] = entity;
}
}
else {
if (entity.hasDelete() && entity.getDelete()) {
delete masterEntities[idString];
}
else {
var masterEntity = masterEntities[idString];
updateMasterEntity(masterEntity, entity);
}
}
//if (idString == '8') {
// console.log('=========== MASTER ENTITY AFTER UPDATE===================')
// logEntity(masterEntities[idString]);
//}
}
else {
console.log('-----ERROR----- no entity ID on entity');
}
}
renderState(gameboard_ctx, gameboard_canvas, masterEntities, gameScaleFactor, 0, 0, shapePositionMapForContext["game"]);
// disable zoom box for now
//drawZoomBox(gameboard_ctx, gameboard_canvas, zoomBoxOriginX, zoomBoxOriginY, zoomFactor);
//renderState(gameboard_zoom_ctx, gameboard_zoom_canvas, masterEntities, zoomFactor, zoomBoxOriginX, zoomBoxOriginY, shapePositionMapForContext["zoom"]);
}
function drawZoomBox(ctx, canvas, originX, originY, zoom) {
ctx.beginPath();
ctx.lineWidth = 1;
ctx.strokeStyle = 'white';
var width = canvas.width / zoom;
var height = canvas.height / zoom;
ctx.rect(originX, originY, width, height);
ctx.stroke();
//ctx.strokeRect(originX, originY, height, width);
}
function clearGameBoard(ctx, canvas, shapePositionMapKey) {
ctx.clearRect(0, 0, canvas.width, canvas.height);
//gameboard_ctx.clearRect(0, 0, gameboard_canvas.width, gameboard_canvas.height);
//gameboard_zoom_ctx.clearRect(0, 0, gameboard_zoom_canvas.width, gameboard_zoom_canvas.height);
shapePositionMapForContext[shapePositionMapKey] = {};
}
function clearExplanationControl(){
expl_ctrl_ctx.clearRect(0,0, expl_ctrl_canvas.width, expl_ctrl_canvas.height);
}
var draw_example_shapes = function () {
clearGameBoard(gameboard_ctx, gameboard_canvas, "game");
colorRGBA = getBasicColorRGBA();
drawRect(gameboard_ctx, 100, 100, 80, 80, colorRGBA);
drawTriangle(gameboard_ctx, 200, 200, 80, 'red');
}
var main = function () {
initUI();
//var redrawChartHiddenButton = document.createElement("BUTTON");
//redrawChartHiddenButton.setAttribute("id", "chartRedrawTriggerButton");
//redrawChartHiddenButton.appendChild(document.createTextNode("Refresh"));
//$("#scaii-game-controls").append(redrawChartHiddenButton);
var debug = true;
if (debug) {
var connectButton = document.createElement("BUTTON");
var connectText = document.createTextNode("Connect to Replay");
connectButton.setAttribute("class", "connectButton");
connectButton.setAttribute("id", "connectButton");
connectButton.appendChild(connectText);
connectButton.onclick = function () {
console.log("calling tryConnect");
tryConnect('.', 0);
};
$("#playback-panel").append(connectButton);
$("#connectButton").css("margin-left", "30px");
$("#connectButton").css("font-family", "Fira Sans");
$("#connectButton").css("font-size", "14px");
} else {
tryConnect('.', 0);
}
}
var configureSpeedSlider = function () {
speedSlider.setAttribute("type", "range");
speedSlider.setAttribute("min", "1");
speedSlider.setAttribute("max", "100");
speedSlider.setAttribute("value", "90");
speedSlider.setAttribute("class", "slider");
speedSlider.setAttribute("id", "speed-slider");
speedSlider.oninput = function () {
var speedString = "" + this.value;
var args = [speedString];
var userCommand = new proto.scaii.common.UserCommand;
userCommand.setCommandType(proto.scaii.common.UserCommand.UserCommandType.SET_SPEED);
userCommand.setArgsList(args);
stageUserCommand(userCommand);
}
//<input type="range" min="1" max="100" value="50" class="slider" id="myRange">
}
var configureZoomSlider = function () {
zoomSlider.setAttribute("type", "range");
zoomSlider.setAttribute("min", "100");
zoomSlider.setAttribute("max", "600");
zoomSlider.setAttribute("value", "200");
zoomSlider.setAttribute("class", "slider");
zoomSlider.setAttribute("id", "zoom-slider");
zoomSlider.oninput = function () {
zoomFactor = "" + this.value / 100;
console.log("zoom factor " + zoomFactor);
handleEntities(entitiesList);
}
}
var configureLabelContainer = function(id, fontSize, textVal, textAlign) {
$(id).css("font-family", "Fira Sans");
$(id).css("font-size", fontSize);
$(id).css("padding-left", "0px");
$(id).css("padding-right", "4px");
$(id).css("padding-top", "2px");
$(id).css("text-align", textAlign);
$(id).html(textVal);
}
var subtractPixels = function(a,b){
var intA = a.replace("px", "");
var intB = b.replace("px", "");
return intA - intB;
}
var configureExplanationControl = function() {
var container_width = $(".control-panel").css("width");
var container_padding = $(".control-panel").css("padding-right");
var can_width = subtractPixels(container_width,container_padding);
expl_ctrl_canvas.width = can_width;
expl_ctrl_canvas.height = 30;
$("#explanation-control-panel").append(expl_ctrl_canvas);
let ctx = expl_ctrl_ctx;
ctx.beginPath();
ctx.moveTo(0,explanationControlYPosition);
ctx.lineTo(can_width,explanationControlYPosition);
ctx.stroke();
console.log("drawing explanation control");
// expl_ctrl_canvas.background = 'red';
// ctx.save();
// var x = 0;
// var y = 13;
// var width = $("#replay-speed-panel").width();
// var height = 4;
// ctx.beginPath();
// ctx.lineWidth = 1;
// ctx.strokeStyle = 'black';
// ctx.strokeRect(x, y, width, height);
// ctx.fillStyle = 'white'
//ctx.fillStyle = colorRGBA;
// ctx.fillRect(x, y, width, height);
ctx.restore();
}
var initUI = function () {
configureSpeedSlider();
configureZoomSlider();
configureExplanationControl();
controlsManager.setControlsNotReady();
gameboard_canvas.width = 200;
gameboard_canvas.height = 200;
gameboard_zoom_canvas.width = gameboard_canvas.width;
gameboard_zoom_canvas.height = gameboard_canvas.height;
$("#scaii-gameboard").append(gameboard_canvas);
$("#scaii-gameboard").css("width", gameboard_canvas.width);
$("#scaii-gameboard").css("height", gameboard_canvas.height);
$("#scaii-gameboard").css("background-color", game_background_color);
$("#scaii-gameboard-zoom").append(gameboard_zoom_canvas);
$("#scaii-gameboard-zoom").css("width", gameboard_zoom_canvas.width);
$("#scaii-gameboard-zoom").css("height", gameboard_zoom_canvas.height);
$("#scaii-gameboard-zoom").css("background-color", game_background_color);
configureLabelContainer("#scaii-acronym","20px",systemAcronym, "center");
configureLabelContainer("#scaii-interface-title","16px",systemTitle, "center");
configureLabelContainer("#replay-speed-label","14px","replay speed", "right");
configureLabelContainer("#progress-label","14px","progress", "right");
configureLabelContainer("#explanation-control-label","14px","explanations", "right");
configureLabelContainer("#playback-label","14px","", "right");
$("#replay-speed-panel").append(speedSlider);
rewindButton.setAttribute("class", "playbackButton");
rewindButton.innerHTML = '<img src="imgs/rewind.png", height="8px" width="10px"/>';
rewindButton.onclick = tryRewind;
$("#playback-panel").append(rewindButton);
$("#scaii-game-controls").css("text-align", "center");
pauseResumeButton.setAttribute("class", "playbackButton");
pauseResumeButton.innerHTML = '<img src="imgs/pause.png", height="8px" width="10px"/>';
$("#playback-panel").append(pauseResumeButton);
pauseResumeButton.onclick = tryPause;
var zoomSliderLabel = document.createElement("div");
$("#scaii-zoom-controls").append(zoomSliderLabel);
zoomSliderLabel.setAttribute("id", "zoom-slider-label");
$("#zoom-slider-label").html("zoom");
$("#zoom-slider-label").css("font-family", "Fira Sans");
$("#zoom-slider-label").css("font-size", "12px");
$("#zoom-slider-label").css("padding-left", "6px");
$("#zoom-slider-label").css("padding-right", "4px");
$("#zoom-slider-label").css("padding-top", "2px");
$("#scaii-zoom-controls").append(zoomSlider);
$("#game-progress").click(processTimelineClick);
}
function clearGameBoards() {
clearGameBoard(gameboard_ctx, gameboard_canvas, "game");
clearGameBoard(gameboard_zoom_ctx, gameboard_zoom_canvas, "zoom");
}
function drawExplanationBox(step, type) {
var stepNumber = Number.parseInt(step);
var startX = 10 + step * 4;
var startY = 10;
timeline_ctx.moveTo(startX, startY);
timeline_ctx.lineTo(startX - 7, startY - 7);
timeline_ctx.lineTo(startX + 7, startY - 7);
timeline_ctx.moveTo(startX, startY);
timeline_ctx.stroke();
//timeline_ctx.addHitRegion({id: step});
}
// calls connect and paints "working" dots. If connect fails, it calls tryConnect again
function tryConnect(dots, attemptCount) {
clearGameBoards();
gameboard_ctx.font = "40px Georgia";
if (dots == '.') {
dots = '..';
}
else if (dots == '..') {
dots = '...';
}
else {
dots = '.';
}
attemptCount = attemptCount + 1;
$("#scaii-interface-title").html(systemTitle + " (... connecting " + attemptCount + " " + dots + ")");
//gameboard_ctx.fillText("connecting " + attemptCount + " " + dots, 10, 50);
connect(dots, attemptCount);
}
var drawExplanationBarChart = function () {
var options = {
//legend: { position: "none" },
title: 'Population of Largest U.S. Cities',
chartArea: { width: '50%' },
hAxis: {
title: 'Total Population',
minValue: 0
},
vAxis: {
title: 'City'
},
'width': 600,
'height': 400
};
var chartData = [
['Decision', 'r1', 'r2'],
['unit victorious', 0.77, 0.4],
['unit loses', -0.39, 0.6],
['adversary flees', 0.2, 0.3]
];
drawBarChart(chartData, options);
}
var ack = function(dealer){
var mm = new proto.scaii.common.MultiMessage;
dealer.send(mm.serializeBinary());
}
var connect = function (dots, attemptCount) {
dealer = new WebSocket('ws://localhost:6112');
dealer.binaryType = 'arraybuffer';
dealer.onopen = function (event) {
$("#scaii-interface-title").html(systemTitle);
console.log("WS Opened.");
};
dealer.onmessage = function (message) {
try {
sessionState = "inProgress";
var s = message.data;
var sPacket = proto.scaii.common.ScaiiPacket.deserializeBinary(s);
if (sPacket.hasReplaySessionConfig()) {
console.log("-----got replaySessionConfig");
var config = sPacket.getReplaySessionConfig();
replaySessionConfig = config;
//var selectedStep = undefined;
handleReplaySessionConfig(config,undefined);
ack(dealer);
}
else if (sPacket.hasVizInit()) {
console.log("-----got vizInit");
var vizInit = sPacket.getVizInit();
handleVizInit(vizInit);
controlsManager.gameStarted();
ack(dealer);
}
else if (sPacket.hasViz()) {
console.log("-----got Viz");
var viz = sPacket.getViz();
handleViz(viz);
// we're moving forward so rewind should be enabled
controlsManager.gameSteppingForward();
var mm;
if (testingMode) {
mm = buildReturnMultiMessageFromState(masterEntities);
}
else {
mm = new proto.scaii.common.MultiMessage;
}
dealer.send(mm.serializeBinary());
}
else if (sPacket.hasExplDetails()) {
console.log('has expl details');
var explDetails = sPacket.getExplDetails();
handleExplDetails(explDetails);
ack(dealer);
}
else if (sPacket.hasReplayControl()) {
console.log("-----got replayCOntrol");
var replayControl = sPacket.getReplayControl();
handleReplayControl(replayControl);
ack(dealer);
}
else if (sPacket.hasErr()) {
console.log("-----got errorPkt");
console.log(sPacket.getErr().getDescription())
ack(dealer);
}
else if (sPacket.hasUserCommand()) {
console.log("-----got userCommand");
var userCommand = sPacket.getUserCommand();
var commandType = userCommand.getCommandType();
if (commandType == proto.scaii.common.UserCommand.UserCommandType.POLL_FOR_COMMANDS) {
var mm;
if (userCommandScaiiPackets.length > 0) {
mm = buildResponseToReplay(userCommandScaiiPackets);
dealer.send(mm.serializeBinary());
controlsManager.userCommandSent();
}
else {
mm = new proto.scaii.common.MultiMessage;
dealer.send(mm.serializeBinary());
}
userCommandScaiiPackets = [];
}
else if (commandType == proto.scaii.common.UserCommand.UserCommandType.JUMP_COMPLETED) {
controlsManager.jumpCompleted();
ack(dealer);
}
}
else {
console.log(sPacket.toString())
console.log('unexpected message from system!');
ack(dealer);
}
}
catch (err) {
alert(err.message);
}
};
dealer.onclose = function (closeEvent) {
console.log("closefired " + attemptCount);
if (sessionState == "pending") {
// the closed connection was likely due to failed connection. try reconnecting
setTimeout(function () { tryConnect(dots, attemptCount); }, 2000);
}
};
dealer.onerror = function (err) {
console.log("Error: " + err);
alert("Error: " + err);
};
};
var then = Date.now();
main(); | viz/js/scaiiViz.js |
goog.require('proto.scaii.common.Action');
goog.require('proto.scaii.common.AgentCfg');
goog.require('proto.scaii.common.AgentEndpoint');
goog.require('proto.scaii.common.AgentSupported');
goog.require('proto.scaii.common.BackendCfg');
goog.require('proto.scaii.common.BackendEndpoint');
goog.require('proto.scaii.common.BackendInit');
goog.require('proto.scaii.common.BackendSupported');
goog.require('proto.scaii.common.BackendSupported.SerializationSupport');
goog.require('proto.scaii.common.Cfg');
goog.require('proto.scaii.common.Color');
goog.require('proto.scaii.common.CoreCfg');
goog.require('proto.scaii.common.CoreEndpoint');
goog.require('proto.scaii.common.Endpoint');
goog.require('proto.scaii.common.Entity');
goog.require('proto.scaii.common.Error');
goog.require('proto.scaii.common.ExplanationPoint');
goog.require('proto.scaii.common.InitAs');
goog.require('proto.scaii.common.ModuleCfg');
goog.require('proto.scaii.common.ModuleEndpoint');
goog.require('proto.scaii.common.ModuleInit');
goog.require('proto.scaii.common.ModuleSupported');
goog.require('proto.scaii.common.MultiMessage');
goog.require('proto.scaii.common.Other');
goog.require('proto.scaii.common.PluginType');
goog.require('proto.scaii.common.Pos');
goog.require('proto.scaii.common.Rect');
goog.require('proto.scaii.common.RustFFIConfig');
goog.require('proto.scaii.common.ScaiiPacket');
goog.require('proto.scaii.common.SerializationFormat');
goog.require('proto.scaii.common.SerializationRequest');
goog.require('proto.scaii.common.SerializationResponse');
goog.require('proto.scaii.common.Shape');
goog.require('proto.scaii.common.State');
goog.require('proto.scaii.common.SupportedBehavior');
goog.require('proto.scaii.common.Triangle');
goog.require('proto.scaii.common.Viz');
goog.require('proto.scaii.common.VizInit');
/**
* Copyright (c) 2017-present, Oregon State University, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
var userInputBlocked = false;
var systemAcronym = "SCAII";
var systemTitle = "Small Configurable AI Interface";
// VizInit defaults
var testingMode = false;
var maxStep = 0;
var explanations = [];
var userCommandScaiiPackets = [];
var sessionState = "pending";
var currentStep = -1;
var gameScaleFactor = 5;
var spacingFactor = 1;
var sizingFactor = 1;
var zoomFactor = 3;
var zoomBoxOriginX = 0;
var zoomBoxOriginY = 0;
var entitiesList = undefined;
var shapePositionMapForContext = {};
var primaryHighlightedShapeIds = [];
var secondaryHighlightedShapeIds = [];
var explanationBoxMap = {};
var game_background_color = "#123456";
var explanationControlYPosition = 14;
// Create the gameboard canvas
var gameboard_canvas = document.createElement("canvas");
var gameboard_ctx = gameboard_canvas.getContext("2d");
var gameboard_zoom_canvas = document.createElement("canvas");
var gameboard_zoom_ctx = gameboard_zoom_canvas.getContext("2d");
var expl_ctrl_canvas = document.createElement("canvas");
var expl_ctrl_ctx = expl_ctrl_canvas.getContext("2d");
var replaySessionConfig;
expl_ctrl_canvas.addEventListener('click', function (event) {
matchingStep = getMatchingExplanationStep(expl_ctrl_ctx, event.offsetX, event.offsetY);
reflectSelectedStep(matchingStep);
console.log('clicked on step ' + matchingStep);
if (matchingStep != undefined) {
var userCommand = new proto.scaii.common.UserCommand;
userCommand.setCommandType(proto.scaii.common.UserCommand.UserCommandType.EXPLAIN);
var args = ['' +matchingStep];
userCommand.setArgsList(args);
stageUserCommand(userCommand);
//handleReplaySessionConfig(replaySessionConfig,matchingStep);
// var targetStepString = '' + matchingStep;
// var args = [targetStepString];
// var userCommand = new proto.scaii.common.UserCommand;
// userCommand.setCommandType(proto.scaii.common.UserCommand.UserCommandType.JUMP_TO_STEP);
// userCommand.setArgsList(args);
// stageUserCommand(userCommand);
}
});
gameboard_canvas.addEventListener('click', function (event) {
if (event.shiftKey) {
adjustZoomBoxPosition(event.offsetX, event.offsetY);
handleEntities(entitiesList);
}
else {
shapeId = getClosestInRangeShapeId(gameboard_ctx, event.offsetX, event.offsetY, shapePositionMapForContext["game"]);
primaryHighlightedShapeIds = [];
if (shapeId != undefined) {
primaryHighlightedShapeIds.push(shapeId);
}
handleEntities(entitiesList);
}
});
gameboard_zoom_canvas.addEventListener('click', function (event) {
shapeId = getClosestInRangeShapeId(gameboard_zoom_ctx, event.offsetX, event.offsetY, shapePositionMapForContext["zoom"]);
primaryHighlightedShapeIds = [];
if (shapeId != undefined) {
primaryHighlightedShapeIds.push(shapeId);
}
handleEntities(entitiesList);
});
var gameboardWidth;
var gameboardHeight;
var timeline_canvas = document.createElement("canvas");
var timeline_ctx = timeline_canvas.getContext("2d");
var pauseResumeButton = document.createElement("BUTTON");
var rewindButton = document.createElement("BUTTON");
var speedSlider = document.createElement("input");
var zoomSlider = document.createElement("input");
rewindButton.disabled = true;
rewindButton.setAttribute("id", "rewindButton");
pauseResumeButton.disabled = true;
pauseResumeButton.setAttribute("id", "pauseResumeButton");
var controlsManager = configureControlsManager(pauseResumeButton, rewindButton);
var shape_outline_color = '#202020';
var shape_outline_width = 2;
var use_shape_color_for_outline = false;
var dealer;
var masterEntities = {};
function adjustZoomBoxPosition(x, y) {
// they clicked at new target for center of box.
var boxWidth = gameboard_canvas.width / zoomFactor;
var boxHeight = gameboard_canvas.height / zoomFactor;
zoomBoxOriginX = x - boxWidth / 2;
zoomBoxOriginY = y - boxHeight / 2;
if (zoomBoxOriginX < 0) {
zoomBoxOriginX = 0;
}
else if (zoomBoxOriginX > gameboard_canvas.width - boxWidth) {
zoomBoxOriginX = gameboard_canvas.width - boxWidth;
}
else {
// a-ok - they clicked in the middle somewhere
}
if (zoomBoxOriginY < 0) {
zoomBoxOriginY = 0;
}
else if (zoomBoxOriginY > gameboard_canvas.height - boxHeight) {
zoomBoxOriginY = gameboard_canvas.height - boxHeight;
}
else {
// a-ok - they clicked in the middle somewhere
}
}
function updateButtonsAsPerCurrentStep() {
if (currentStep == 0) {
controlsManager.expressResumeButton();
controlsManager.enablePauseResume();
controlsManager.disableRewind();
}
else if (currentStep == 1) {
controlsManager.expressResumeButton();
controlsManager.enablePauseResume();
controlsManager.disableRewind();
}
else if (currentStep == maxStep) {
controlsManager.expressPauseButton();
controlsManager.disablePauseResume();
controlsManager.enableRewind();
}
else {
controlsManager.expressPauseButton();
controlsManager.enablePauseResume();
controlsManager.enableRewind();
}
}
function handleReplayControl(replayControl) {
var command = replayControl.getCommandList();
if (command.length == 2) {
if (command[0] == 'set_step_position') {
currentStep = parseInt(command[1]);
console.log('replay control set step_position to ' + currentStep);
updateProgress(currentStep, maxStep);
updateButtonsAsPerCurrentStep();
}
}
}
function handleReplaySessionConfig(rsc) {
explanationBoxMap = {};
if (rsc.hasStepCount()) {
maxStep = rsc.getStepCount() - 1;
}
var explanation_steps = rsc.getExplanationStepsList();
var explanation_titles = rsc.getExplanationTitlesList();
console.log("explanation count is " + explanation_steps.length);
var expl_count = explanation_steps.length;
var index = 0;
while (index < expl_count){
var step = explanation_steps[index];
var title = explanation_titles[index];
configureExplanation(rsc.getStepCount(), step, title);
index = index + 1;
}
}
function handleExplDetails(explDetails){
console.log('handling expl details');
if (explDetails.hasExplPoint()){
explanationPoint = explDetails.getExplPoint();
console.log('got expl point for step ' + explanationPoint.getStep());
renderExplanationPoint(explanationPoint);
}
else {
console.log("MISSING expl point!");
}
}
function handleVizInit(vizInit) {
clearGameBoards();
//gameboard_ctx.fillText("Received VizInit!", 10, 50);
if (vizInit.hasTestMode()) {
if (vizInit.getTestMode()) {
testingMode = true;
}
}
// if (vizInit.hasGameboardWidth()) {
// gameboardWidth = vizInit.getGameboardWidth();
console.log("gameboard width : " + gameboardWidth);
// $("#scaii-gameboard").css("width", gameboardWidth);
// gameboard_canvas.width = gameboardWidth;
// }
// gameboard_zoom_canvas.width = gameboard_canvas.width;
// if (vizInit.hasGameboardHeight()) {
// gameboardHeight = vizInit.getGameboardHeight();
// $("#scaii-gameboard").css("height", gameboardHeight);
// gameboard_canvas.height = gameboardHeight;
// }
// gameboard_zoom_canvas.height = gameboard_canvas.height;
//renderTimeline(maxStep);
}
function handleViz(vizData) {
console.log('received Viz...');
entitiesList = vizData.getEntitiesList();
handleEntities(entitiesList);
//if (vizData.hasChart()) {
// var chartInfo = vizData.getChart();
// renderChartInfo(chartInfo, gameboardHeight);
//}
currentStep = currentStep + 1;
updateButtonsAsPerCurrentStep();
if (currentStep == maxStep) {
controlsManager.reachedEndOfGame();
}
console.log("current_step is " + currentStep + "maxStep is " + maxStep);
updateProgress(currentStep, maxStep);
}
function handleEntities(entitiesList) {
//console.log('entities count :' + entitiesList.length);
for (var i in entitiesList) {
var entity = entitiesList[i];
if (entity.hasId()) {
var idString = '' + entity.getId();
//if (idString == '8') {
// console.log('=========== UPDATING ENTITY ===================')
// logEntity(entity);
//}
//console.log('############## id string read as ' + idString + '###############');
if (masterEntities[idString] == undefined) {
if (entity.hasDelete() && entity.getDelete()) {
// do not add new entity that is marked as delete
}
else {
masterEntities[idString] = entity;
}
}
else {
if (entity.hasDelete() && entity.getDelete()) {
delete masterEntities[idString];
}
else {
var masterEntity = masterEntities[idString];
updateMasterEntity(masterEntity, entity);
}
}
//if (idString == '8') {
// console.log('=========== MASTER ENTITY AFTER UPDATE===================')
// logEntity(masterEntities[idString]);
//}
}
else {
console.log('-----ERROR----- no entity ID on entity');
}
}
renderState(gameboard_ctx, gameboard_canvas, masterEntities, gameScaleFactor, 0, 0, shapePositionMapForContext["game"]);
// disable zoom box for now
//drawZoomBox(gameboard_ctx, gameboard_canvas, zoomBoxOriginX, zoomBoxOriginY, zoomFactor);
//renderState(gameboard_zoom_ctx, gameboard_zoom_canvas, masterEntities, zoomFactor, zoomBoxOriginX, zoomBoxOriginY, shapePositionMapForContext["zoom"]);
}
function drawZoomBox(ctx, canvas, originX, originY, zoom) {
ctx.beginPath();
ctx.lineWidth = 1;
ctx.strokeStyle = 'white';
var width = canvas.width / zoom;
var height = canvas.height / zoom;
ctx.rect(originX, originY, width, height);
ctx.stroke();
//ctx.strokeRect(originX, originY, height, width);
}
function clearGameBoard(ctx, canvas, shapePositionMapKey) {
ctx.clearRect(0, 0, canvas.width, canvas.height);
//gameboard_ctx.clearRect(0, 0, gameboard_canvas.width, gameboard_canvas.height);
//gameboard_zoom_ctx.clearRect(0, 0, gameboard_zoom_canvas.width, gameboard_zoom_canvas.height);
shapePositionMapForContext[shapePositionMapKey] = {};
}
function clearExplanationControl(){
expl_ctrl_ctx.clearRect(0,0, expl_ctrl_canvas.width, expl_ctrl_canvas.height);
}
var draw_example_shapes = function () {
clearGameBoard(gameboard_ctx, gameboard_canvas, "game");
colorRGBA = getBasicColorRGBA();
drawRect(gameboard_ctx, 100, 100, 80, 80, colorRGBA);
drawTriangle(gameboard_ctx, 200, 200, 80, 'red');
}
var main = function () {
initUI();
//var redrawChartHiddenButton = document.createElement("BUTTON");
//redrawChartHiddenButton.setAttribute("id", "chartRedrawTriggerButton");
//redrawChartHiddenButton.appendChild(document.createTextNode("Refresh"));
//$("#scaii-game-controls").append(redrawChartHiddenButton);
var debug = true;
if (debug) {
var connectButton = document.createElement("BUTTON");
var connectText = document.createTextNode("Connect to Replay");
connectButton.setAttribute("class", "connectButton");
connectButton.setAttribute("id", "connectButton");
connectButton.appendChild(connectText);
connectButton.onclick = function () {
console.log("calling tryConnect");
tryConnect('.', 0);
};
$("#playback-panel").append(connectButton);
$("#connectButton").css("margin-left", "30px");
$("#connectButton").css("font-family", "Fira Sans");
$("#connectButton").css("font-size", "14px");
} else {
tryConnect('.', 0);
}
}
var configureSpeedSlider = function () {
speedSlider.setAttribute("type", "range");
speedSlider.setAttribute("min", "1");
speedSlider.setAttribute("max", "100");
speedSlider.setAttribute("value", "90");
speedSlider.setAttribute("class", "slider");
speedSlider.setAttribute("id", "speed-slider");
speedSlider.oninput = function () {
var speedString = "" + this.value;
var args = [speedString];
var userCommand = new proto.scaii.common.UserCommand;
userCommand.setCommandType(proto.scaii.common.UserCommand.UserCommandType.SET_SPEED);
userCommand.setArgsList(args);
stageUserCommand(userCommand);
}
//<input type="range" min="1" max="100" value="50" class="slider" id="myRange">
}
var configureZoomSlider = function () {
zoomSlider.setAttribute("type", "range");
zoomSlider.setAttribute("min", "100");
zoomSlider.setAttribute("max", "600");
zoomSlider.setAttribute("value", "200");
zoomSlider.setAttribute("class", "slider");
zoomSlider.setAttribute("id", "zoom-slider");
zoomSlider.oninput = function () {
zoomFactor = "" + this.value / 100;
console.log("zoom factor " + zoomFactor);
handleEntities(entitiesList);
}
}
var configureLabelContainer = function(id, fontSize, textVal, textAlign) {
$(id).css("font-family", "Fira Sans");
$(id).css("font-size", fontSize);
$(id).css("padding-left", "0px");
$(id).css("padding-right", "4px");
$(id).css("padding-top", "2px");
$(id).css("text-align", textAlign);
$(id).html(textVal);
}
var subtractPixels = function(a,b){
var intA = a.replace("px", "");
var intB = b.replace("px", "");
return intA - intB;
}
var configureExplanationControl = function() {
var container_width = $(".control-panel").css("width");
var container_padding = $(".control-panel").css("padding-right");
var can_width = subtractPixels(container_width,container_padding);
expl_ctrl_canvas.width = can_width;
expl_ctrl_canvas.height = 30;
$("#explanation-control-panel").append(expl_ctrl_canvas);
let ctx = expl_ctrl_ctx;
ctx.beginPath();
ctx.moveTo(0,explanationControlYPosition);
ctx.lineTo(can_width,explanationControlYPosition);
ctx.stroke();
console.log("drawing explanation control");
// expl_ctrl_canvas.background = 'red';
// ctx.save();
// var x = 0;
// var y = 13;
// var width = $("#replay-speed-panel").width();
// var height = 4;
// ctx.beginPath();
// ctx.lineWidth = 1;
// ctx.strokeStyle = 'black';
// ctx.strokeRect(x, y, width, height);
// ctx.fillStyle = 'white'
//ctx.fillStyle = colorRGBA;
// ctx.fillRect(x, y, width, height);
ctx.restore();
}
var initUI = function () {
configureSpeedSlider();
configureZoomSlider();
configureExplanationControl();
controlsManager.setControlsNotReady();
gameboard_canvas.width = 200;
gameboard_canvas.height = 200;
gameboard_zoom_canvas.width = gameboard_canvas.width;
gameboard_zoom_canvas.height = gameboard_canvas.height;
$("#scaii-gameboard").append(gameboard_canvas);
$("#scaii-gameboard").css("width", gameboard_canvas.width);
$("#scaii-gameboard").css("height", gameboard_canvas.height);
$("#scaii-gameboard").css("background-color", game_background_color);
$("#scaii-gameboard-zoom").append(gameboard_zoom_canvas);
$("#scaii-gameboard-zoom").css("width", gameboard_zoom_canvas.width);
$("#scaii-gameboard-zoom").css("height", gameboard_zoom_canvas.height);
$("#scaii-gameboard-zoom").css("background-color", game_background_color);
configureLabelContainer("#scaii-acronym","20px",systemAcronym, "center");
configureLabelContainer("#scaii-interface-title","16px",systemTitle, "center");
configureLabelContainer("#replay-speed-label","14px","replay speed", "right");
configureLabelContainer("#progress-label","14px","progress", "right");
configureLabelContainer("#explanation-control-label","14px","explanations", "right");
configureLabelContainer("#playback-label","14px","", "right");
$("#replay-speed-panel").append(speedSlider);
rewindButton.setAttribute("class", "playbackButton");
rewindButton.innerHTML = '<img src="imgs/rewind.png", height="8px" width="10px"/>';
rewindButton.onclick = tryRewind;
$("#playback-panel").append(rewindButton);
$("#scaii-game-controls").css("text-align", "center");
pauseResumeButton.setAttribute("class", "playbackButton");
pauseResumeButton.innerHTML = '<img src="imgs/pause.png", height="8px" width="10px"/>';
$("#playback-panel").append(pauseResumeButton);
pauseResumeButton.onclick = tryPause;
var zoomSliderLabel = document.createElement("div");
$("#scaii-zoom-controls").append(zoomSliderLabel);
zoomSliderLabel.setAttribute("id", "zoom-slider-label");
$("#zoom-slider-label").html("zoom");
$("#zoom-slider-label").css("font-family", "Fira Sans");
$("#zoom-slider-label").css("font-size", "12px");
$("#zoom-slider-label").css("padding-left", "6px");
$("#zoom-slider-label").css("padding-right", "4px");
$("#zoom-slider-label").css("padding-top", "2px");
$("#scaii-zoom-controls").append(zoomSlider);
$("#game-progress").click(processTimelineClick);
}
function clearGameBoards() {
clearGameBoard(gameboard_ctx, gameboard_canvas, "game");
clearGameBoard(gameboard_zoom_ctx, gameboard_zoom_canvas, "zoom");
}
function drawExplanationBox(step, type) {
var stepNumber = Number.parseInt(step);
var startX = 10 + step * 4;
var startY = 10;
timeline_ctx.moveTo(startX, startY);
timeline_ctx.lineTo(startX - 7, startY - 7);
timeline_ctx.lineTo(startX + 7, startY - 7);
timeline_ctx.moveTo(startX, startY);
timeline_ctx.stroke();
//timeline_ctx.addHitRegion({id: step});
}
// calls connect and paints "working" dots. If connect fails, it calls tryConnect again
function tryConnect(dots, attemptCount) {
clearGameBoards();
gameboard_ctx.font = "40px Georgia";
if (dots == '.') {
dots = '..';
}
else if (dots == '..') {
dots = '...';
}
else {
dots = '.';
}
attemptCount = attemptCount + 1;
$("#scaii-interface-title").html(systemTitle + " (... connecting " + attemptCount + " " + dots + ")");
//gameboard_ctx.fillText("connecting " + attemptCount + " " + dots, 10, 50);
connect(dots, attemptCount);
}
var drawExplanationBarChart = function () {
var options = {
//legend: { position: "none" },
title: 'Population of Largest U.S. Cities',
chartArea: { width: '50%' },
hAxis: {
title: 'Total Population',
minValue: 0
},
vAxis: {
title: 'City'
},
'width': 600,
'height': 400
};
var chartData = [
['Decision', 'r1', 'r2'],
['unit victorious', 0.77, 0.4],
['unit loses', -0.39, 0.6],
['adversary flees', 0.2, 0.3]
];
drawBarChart(chartData, options);
}
var ack = function(dealer){
var mm = new proto.scaii.common.MultiMessage;
dealer.send(mm.serializeBinary());
}
var connect = function (dots, attemptCount) {
dealer = new WebSocket('ws://localhost:6112');
dealer.binaryType = 'arraybuffer';
dealer.onopen = function (event) {
$("#scaii-interface-title").html(systemTitle);
console.log("WS Opened.");
};
dealer.onmessage = function (message) {
try {
sessionState = "inProgress";
var s = message.data;
var sPacket = proto.scaii.common.ScaiiPacket.deserializeBinary(s);
if (sPacket.hasReplaySessionConfig()) {
console.log("-----got replaySessionConfig");
var config = sPacket.getReplaySessionConfig();
replaySessionConfig = config;
//var selectedStep = undefined;
handleReplaySessionConfig(config);
ack(dealer);
}
else if (sPacket.hasVizInit()) {
console.log("-----got vizInit");
var vizInit = sPacket.getVizInit();
handleVizInit(vizInit);
controlsManager.gameStarted();
ack(dealer);
}
else if (sPacket.hasViz()) {
console.log("-----got Viz");
var viz = sPacket.getViz();
handleViz(viz);
// we're moving forward so rewind should be enabled
controlsManager.gameSteppingForward();
var mm;
if (testingMode) {
mm = buildReturnMultiMessageFromState(masterEntities);
}
else {
mm = new proto.scaii.common.MultiMessage;
}
dealer.send(mm.serializeBinary());
}
else if (sPacket.hasExplDetails()) {
console.log('has expl details');
var explDetails = sPacket.getExplDetails();
handleExplDetails(explDetails);
ack(dealer);
}
else if (sPacket.hasReplayControl()) {
console.log("-----got replayCOntrol");
var replayControl = sPacket.getReplayControl();
handleReplayControl(replayControl);
ack(dealer);
}
else if (sPacket.hasErr()) {
console.log("-----got errorPkt");
console.log(sPacket.getErr().getDescription())
ack(dealer);
}
else if (sPacket.hasUserCommand()) {
console.log("-----got userCommand");
var userCommand = sPacket.getUserCommand();
var commandType = userCommand.getCommandType();
if (commandType == proto.scaii.common.UserCommand.UserCommandType.POLL_FOR_COMMANDS) {
var mm;
if (userCommandScaiiPackets.length > 0) {
mm = buildResponseToReplay(userCommandScaiiPackets);
dealer.send(mm.serializeBinary());
controlsManager.userCommandSent();
}
else {
mm = new proto.scaii.common.MultiMessage;
dealer.send(mm.serializeBinary());
}
userCommandScaiiPackets = [];
}
else if (commandType == proto.scaii.common.UserCommand.UserCommandType.JUMP_COMPLETED) {
controlsManager.jumpCompleted();
ack(dealer);
}
}
else {
console.log(sPacket.toString())
console.log('unexpected message from system!');
ack(dealer);
}
}
catch (err) {
alert(err.message);
}
};
dealer.onclose = function (closeEvent) {
console.log("closefired " + attemptCount);
if (sessionState == "pending") {
// the closed connection was likely due to failed connection. try reconnecting
setTimeout(function () { tryConnect(dots, attemptCount); }, 2000);
}
};
dealer.onerror = function (err) {
console.log("Error: " + err);
alert("Error: " + err);
};
};
var then = Date.now();
main(); | selection of explanations reflected in color of selector
| viz/js/scaiiViz.js | selection of explanations reflected in color of selector | <ide><path>iz/js/scaiiViz.js
<ide> var args = ['' +matchingStep];
<ide> userCommand.setArgsList(args);
<ide> stageUserCommand(userCommand);
<del> //handleReplaySessionConfig(replaySessionConfig,matchingStep);
<add> handleReplaySessionConfig(replaySessionConfig,matchingStep);
<ide> // var targetStepString = '' + matchingStep;
<ide> // var args = [targetStepString];
<ide> // var userCommand = new proto.scaii.common.UserCommand;
<ide> }
<ide> }
<ide> }
<del>function handleReplaySessionConfig(rsc) {
<add>function handleReplaySessionConfig(rsc, selectedStep) {
<ide> explanationBoxMap = {};
<ide> if (rsc.hasStepCount()) {
<ide> maxStep = rsc.getStepCount() - 1;
<ide> var index = 0;
<ide> while (index < expl_count){
<ide> var step = explanation_steps[index];
<add> var selected = false;
<add> if (selectedStep == step){
<add> selected = true;
<add> }
<ide> var title = explanation_titles[index];
<del> configureExplanation(rsc.getStepCount(), step, title);
<add> configureExplanation(rsc.getStepCount(), step, title, selected);
<ide> index = index + 1;
<ide> }
<ide> }
<ide> var config = sPacket.getReplaySessionConfig();
<ide> replaySessionConfig = config;
<ide> //var selectedStep = undefined;
<del> handleReplaySessionConfig(config);
<add> handleReplaySessionConfig(config,undefined);
<ide> ack(dealer);
<ide> }
<ide> else if (sPacket.hasVizInit()) { |
|
Java | agpl-3.0 | 8ebf5d78bc1fb1d4e34deb5ed798fc52658aa73e | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | edc7ac5e-2e61-11e5-9284-b827eb9e62be | hello.java | edc208ee-2e61-11e5-9284-b827eb9e62be | edc7ac5e-2e61-11e5-9284-b827eb9e62be | hello.java | edc7ac5e-2e61-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>edc208ee-2e61-11e5-9284-b827eb9e62be
<add>edc7ac5e-2e61-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 081593ac1d40ac343b43864d1d10ad165de15806 | 0 | AsuraTeam/dubbos,AsuraTeam/dubbos,AsuraTeam/dubbos | /*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.cluster.support;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.Version;
import com.alibaba.dubbo.common.logger.Logger;
import com.alibaba.dubbo.common.logger.LoggerFactory;
import com.alibaba.dubbo.common.utils.NetUtils;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.Result;
import com.alibaba.dubbo.rpc.RpcContext;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.cluster.Directory;
import com.alibaba.dubbo.rpc.cluster.LoadBalance;
/**
 * Failover: when an invocation fails, retry on another server. Usually used for read operations, but retries introduce longer latency.
*
* <a href="http://en.wikipedia.org/wiki/Failover">Failover</a>
*
* @author william.liangf
*/
public class FailoverClusterInvoker<T> extends AbstractClusterInvoker<T> {
private static final Logger logger = LoggerFactory.getLogger(FailoverClusterInvoker.class);
public FailoverClusterInvoker(Directory<T> directory) {
super(directory);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public Result doInvoke(Invocation invocation, List<Invoker<T>> invokers, LoadBalance loadbalance) throws RpcException {
checkInvokers(invokers, invocation);
int len = getUrl().getMethodParameter(invocation.getMethodName(), Constants.RETRIES_KEY, Constants.DEFAULT_RETRIES) + 1;
if (len <= 0) {
len = 1;
}
// retry loop.
RpcException le = null; // last exception.
List<Invoker<T>> invoked = new ArrayList<Invoker<T>>(invokers.size()); // invoked invokers.
Set<String> providers = new HashSet<String>(len);
for (int i = 0; i < len; i++) {
Invoker<T> invoker = select(loadbalance, invocation, invokers, invoked);
invoked.add(invoker);
RpcContext.getContext().setInvokers((List)invoked);
try {
Result result = invoker.invoke(invocation);
if (le != null && logger.isWarnEnabled()) {
logger.warn("Although retry the method " + invocation.getMethodName()
+ " in the service " + getInterface().getName()
+ " was successful by the provider " + invoker.getUrl().getAddress()
+ ", but there have been failed providers " + providers + "/" + invokers.size()
+ " from the registry " + directory.getUrl().getAddress()
+ " on the consumer " + NetUtils.getLocalHost()
+ " using the dubbo version " + Version.getVersion() + ". Last error is: "
+ le.getMessage(), le);
}
return result;
} catch (RpcException e) {
if (e.isBiz()) { // biz exception.
throw e;
}
le = e;
} catch (Throwable e) {
le = new RpcException(e.getMessage(), e);
} finally {
providers.add(invoker.getUrl().getAddress());
}
}
throw new RpcException(le != null ? le.getCode() : 0, "Failed to invoke the method "
+ invocation.getMethodName() + " in the service " + getInterface().getName()
+ ". Tried " + len + " times of the providers " + providers + "/" + invokers.size() + " from the registry "
+ directory.getUrl().getAddress()
+ " on the consumer " + NetUtils.getLocalHost() + " using the dubbo version "
+ Version.getVersion() + ". Last error is: "
+ (le != null ? le.getMessage() : ""), le);
}
} | dubbo-cluster/src/main/java/com/alibaba/dubbo/rpc/cluster/support/FailoverClusterInvoker.java | /*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.cluster.support;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.Version;
import com.alibaba.dubbo.common.logger.Logger;
import com.alibaba.dubbo.common.logger.LoggerFactory;
import com.alibaba.dubbo.common.utils.NetUtils;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.Result;
import com.alibaba.dubbo.rpc.RpcContext;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.cluster.Directory;
import com.alibaba.dubbo.rpc.cluster.LoadBalance;
/**
 * Failover: when an invocation fails, retry on another server. Usually used for read operations, but retries introduce longer latency.
*
* <a href="http://en.wikipedia.org/wiki/Failover">Failover</a>
*
* @author william.liangf
*/
public class FailoverClusterInvoker<T> extends AbstractClusterInvoker<T> {
private static final Logger logger = LoggerFactory.getLogger(FailoverClusterInvoker.class);
public FailoverClusterInvoker(Directory<T> directory) {
super(directory);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public Result doInvoke(Invocation invocation, List<Invoker<T>> invokers, LoadBalance loadbalance) throws RpcException {
checkInvokers(invokers, invocation);
int len = getUrl().getMethodParameter(invocation.getMethodName(), Constants.RETRIES_KEY, Constants.DEFAULT_RETRIES) + 1;
if (len <= 0) {
len = 1;
}
// retry loop.
RpcException le = null; // last exception.
List<Invoker<T>> invoked = new ArrayList<Invoker<T>>(invokers.size()); // invoked invokers.
Set<String> providers = new HashSet<String>(len);
for (int i = 0; i < len; i++) {
Invoker<T> invoker = select(loadbalance, invocation, invokers, invoked);
invoked.add(invoker);
RpcContext.getContext().setInvokers((List)invoked);
try {
Result result = invoker.invoke(invocation);
if (le != null && logger.isWarnEnabled()) {
logger.warn("Although retry the method " + invocation.getMethodName()
+ " in the service " + getInterface().getName()
+ " was successful by the provider " + invoker.getUrl().getAddress()
+ ", but there have been failed providers " + providers
+ " from the registry " + directory.getUrl().getAddress()
+ " on the consumer " + NetUtils.getLocalHost()
+ " using the dubbo version " + Version.getVersion() + ". Last error is: "
+ le.getMessage(), le);
}
return result;
} catch (RpcException e) {
if (e.isBiz()) { // biz exception.
throw e;
}
le = e;
} catch (Throwable e) {
le = new RpcException(e.getMessage(), e);
} finally {
providers.add(invoker.getUrl().getAddress());
}
}
throw new RpcException(le != null ? le.getCode() : 0, "Failed to invoke the method "
+ invocation.getMethodName() + " in the service " + getInterface().getName()
+ ". Tried " + len + " times of the providers " + providers + " from the registry "
+ directory.getUrl().getAddress()
+ " on the consumer " + NetUtils.getLocalHost() + " using the dubbo version "
+ Version.getVersion() + ". Last error is: "
+ (le != null ? le.getMessage() : ""), le);
}
} | DUBBO-182 Modify the Failover invocation error message
git-svn-id: 3d0e7b608a819e97e591a7b753bfd1a27aaeb5ee@844 1a56cb94-b969-4eaa-88fa-be21384802f2
 | dubbo-cluster/src/main/java/com/alibaba/dubbo/rpc/cluster/support/FailoverClusterInvoker.java | DUBBO-182 Modify the Failover invocation error message | <ide><path>ubbo-cluster/src/main/java/com/alibaba/dubbo/rpc/cluster/support/FailoverClusterInvoker.java
<ide> logger.warn("Although retry the method " + invocation.getMethodName()
<ide> + " in the service " + getInterface().getName()
<ide> + " was successful by the provider " + invoker.getUrl().getAddress()
<del> + ", but there have been failed providers " + providers
<add> + ", but there have been failed providers " + providers + "/" + invokers.size()
<ide> + " from the registry " + directory.getUrl().getAddress()
<ide> + " on the consumer " + NetUtils.getLocalHost()
<ide> + " using the dubbo version " + Version.getVersion() + ". Last error is: "
<ide> }
<ide> throw new RpcException(le != null ? le.getCode() : 0, "Failed to invoke the method "
<ide> + invocation.getMethodName() + " in the service " + getInterface().getName()
<del> + ". Tried " + len + " times of the providers " + providers + " from the registry "
<add> + ". Tried " + len + " times of the providers " + providers + "/" + invokers.size() + " from the registry "
<ide> + directory.getUrl().getAddress()
<ide> + " on the consumer " + NetUtils.getLocalHost() + " using the dubbo version "
<ide> + Version.getVersion() + ". Last error is: " |
|
Java | mit | 7d0c0e6252687da32ccac50d4ac741346592f15b | 0 | toadette/mtg_insight | package de.avalax.mtg_insight.port.adapter.service;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import de.avalax.mtg_insight.domain.model.card.Card;
import de.avalax.mtg_insight.domain.model.card.permanent.creature.Creature;
import de.avalax.mtg_insight.domain.model.deck.Deck;
import de.avalax.mtg_insight.domain.model.deck.DeckService;
import de.avalax.mtg_insight.domain.model.deck.Deckname;
import de.avalax.mtg_insight.domain.model.deck.StandardDeck;
public class TappedOutDeckService implements DeckService {
private List<Deckname> decknames;
private final String host = "http://tappedout.net/mtg-decks/";
private final String format = "/?fmt=txt";
private CardService cardService;
public TappedOutDeckService(CardService cardService) {
this.cardService = cardService;
decknames = new ArrayList<>();
}
private Deck readFromFile(String name) throws IOException {
decknames.add(new Deckname(name));
URL url = new URL(host + name + format);
BufferedReader reader = null;
List<Card> cardOfDeck = new ArrayList<>();
try {
String line = null;
reader = new BufferedReader(new InputStreamReader(url.openStream()));
while ((line = reader.readLine()) != null) {
if (line.length() > 0) {
addCardFromLine(line, cardOfDeck);
}
}
} finally {
if (reader != null) {
reader.close();
}
}
return new StandardDeck(new Deckname(name), cardOfDeck);
}
private void addCardFromLine(String line, List<Card> cardOfDeck) {
String[] split = line.split("\\t");
int count = Integer.valueOf(split[0]);
String name = split[1];
for (int i = 0; i < count; i++) {
cardOfDeck.add(new Creature(name,null,null,null));
}
}
@Override
public Deck deckFromDeckname(Deckname deckname) {
Deck deck;
try {
deck = readFromFile(deckname.getName());
} catch (Exception e) {
throw new RuntimeException("Error TappedOutDeckService: " + e.getCause().getLocalizedMessage());
}
return deck;
}
@Override
public List<Deckname> decknames() {
return decknames;
}
}
| port/src/main/java/de/avalax/mtg_insight/port/adapter/service/TappedOutDeckService.java | package de.avalax.mtg_insight.port.adapter.service;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import de.avalax.mtg_insight.domain.model.card.Card;
import de.avalax.mtg_insight.domain.model.card.permanent.creature.Creature;
import de.avalax.mtg_insight.domain.model.deck.Deck;
import de.avalax.mtg_insight.domain.model.deck.DeckService;
import de.avalax.mtg_insight.domain.model.deck.Deckname;
import de.avalax.mtg_insight.domain.model.deck.StandardDeck;
public class TappedOutDeckService implements DeckService {
private List<Deckname> decknames;
private final String host = "http://tappedout.net/mtg-decks/";
private final String format = "/?fmt=txt";
public TappedOutDeckService(String deckname, CardService cardService) {
decknames = new ArrayList<>();
decknames.add(new Deckname(deckname));
}
private Deck readFromFile(String name) throws IOException {
URL url = new URL(host + name + format);
BufferedReader reader = null;
List<Card> cardOfDeck = new ArrayList<>();
try {
String line = null;
reader = new BufferedReader(new InputStreamReader(url.openStream()));
while ((line = reader.readLine()) != null) {
if (line.length() > 0) {
addCardFromLine(line, cardOfDeck);
}
}
} finally {
if (reader != null) {
reader.close();
}
}
return new StandardDeck(new Deckname(name), cardOfDeck);
}
private void addCardFromLine(String line, List<Card> cardOfDeck) {
String[] split = line.split("\\t");
int count = Integer.valueOf(split[0]);
String name = split[1];
for (int i = 0; i < count; i++) {
cardOfDeck.add(new Creature(name,null,null,null));
}
}
@Override
public Deck deckFromDeckname(Deckname deckname) {
Deck deck;
try {
deck = readFromFile(deckname.getName());
} catch (Exception e) {
throw new RuntimeException("Error TappedOutDeckService: " + e.getCause().getLocalizedMessage());
}
return deck;
}
@Override
public List<Deckname> decknames() {
return decknames;
}
}
| remove deckname from constructor
| port/src/main/java/de/avalax/mtg_insight/port/adapter/service/TappedOutDeckService.java | remove deckname from constructor | <ide><path>ort/src/main/java/de/avalax/mtg_insight/port/adapter/service/TappedOutDeckService.java
<ide> private List<Deckname> decknames;
<ide> private final String host = "http://tappedout.net/mtg-decks/";
<ide> private final String format = "/?fmt=txt";
<add> private CardService cardService;
<ide>
<del> public TappedOutDeckService(String deckname, CardService cardService) {
<add> public TappedOutDeckService(CardService cardService) {
<add> this.cardService = cardService;
<ide> decknames = new ArrayList<>();
<del> decknames.add(new Deckname(deckname));
<ide> }
<ide>
<ide> private Deck readFromFile(String name) throws IOException {
<add> decknames.add(new Deckname(name));
<ide> URL url = new URL(host + name + format);
<ide> BufferedReader reader = null;
<ide> List<Card> cardOfDeck = new ArrayList<>(); |
|
Java | mit | 27495a0c64dac3fd0be9fc91b947ba337b80bed4 | 0 | iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable | /*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.commandline;
import org.apache.log4j.*;
import org.broadinstitute.sting.gatk.CommandLineGATK;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.help.ApplicationDetails;
import org.broadinstitute.sting.utils.help.HelpFormatter;
import java.io.IOException;
import java.util.*;
public abstract class CommandLineProgram {
/** The command-line program and the arguments it returned. */
protected ParsingEngine parser = null;
/** the default log level */
@Argument(fullName = "logging_level",
shortName = "l",
doc = "Set the minimum level of logging, i.e. setting INFO get's you INFO up to FATAL, setting ERROR gets you ERROR and FATAL level logging.",
required = false)
protected String logging_level = "INFO";
/** where to send the output of our logger */
@Output(fullName = "log_to_file",
shortName = "log",
doc = "Set the logging location",
required = false)
protected String toFile = null;
/** do we want to generate debugging information with the logs */
@Argument(fullName = "debug_mode",
shortName = "debug",
doc = "Set the logging file string to include a lot of debugging information (SLOW!)",
required = false)
protected Boolean debugMode = false;
/** this is used to indicate if they've asked for help */
@Argument(fullName = "help", shortName = "h", doc = "Generate this help message", required = false)
public Boolean help = false;
/** our logging output patterns */
private static String patternString = "%-5p %d{HH:mm:ss,SSS} %C{1} - %m %n";
private static String debugPatternString = "%n[level] %p%n[date]\t\t %d{dd MMM yyyy HH:mm:ss,SSS} %n[class]\t\t %C %n[location]\t %l %n[line number]\t %L %n[message]\t %m %n";
static {
/**
         * The very first thing that any Sting application does is force the JVM locale into US English, so that we don't have
* to think about number formatting issues.
*/
forceJVMLocaleToUSEnglish();
// setup a basic log configuration
CommandLineUtils.configureConsoleLogging();
}
/**
* Allows a given application to return a brief description of itself.
*
* @return An ApplicationDetails object describing the current application. Should not be null.
*/
protected ApplicationDetails getApplicationDetails() {
return new ApplicationDetails(ApplicationDetails.createDefaultHeader(getClass()),
Collections.<String>emptyList(),
ApplicationDetails.createDefaultRunningInstructions(getClass()),
null);
}
/**
* Subclasses of CommandLinePrograms can provide their own types of command-line arguments.
* @return A collection of type descriptors generating implementation-dependent placeholders.
*/
protected Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
return Collections.emptyList();
}
/**
* Will this application want to vary its argument list dynamically?
* If so, parse the command-line options and then prompt the subclass to return
* a list of argument providers.
*
* @return Whether the application should vary command-line arguments dynamically.
*/
protected boolean canAddArgumentsDynamically() { return false; }
/**
     * Provide a list of objects to inspect, looking for additional command-line arguments.
*
* @return A list of objects to inspect.
*/
protected Class[] getArgumentSources() {
return new Class[]{};
}
/**
* Name this argument source. Provides the (full) class name as a default.
*
* @param source The argument source.
*
* @return a name for the argument source.
*/
protected String getArgumentSourceName( Class source ) { return source.toString(); }
/**
* Sets the command-line parsing engine. Necessary for unit testing purposes.
* @param parser the new command-line parsing engine
*/
public void setParser( ParsingEngine parser ) {
this.parser = parser;
}
/**
* this is the function that the inheriting class can expect to have called
* when all the argument processing is done
*
* @return the return code to exit the program with
* @throws Exception when an exception occurs
*/
protected abstract int execute() throws Exception;
public static int result = -1;
/**
* This function is called to start processing the command line, and kick
     * off the execute method of the program.
*
* @param clp the command line program to execute
* @param args the command line arguments passed in
* @throws Exception when an exception occurs
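     *
     * <p>Illustrative sketch of a typical launcher; the subclass name {@code MyTool} is an
     * assumption for the example, not something defined in this file:</p>
     * <pre>{@code
     * public static void main(String[] argv) {
     *     try {
     *         CommandLineProgram.start(new MyTool(), argv);
     *         System.exit(CommandLineProgram.result);
     *     } catch (Exception e) {
     *         exitSystemWithError(e); // prints the error banner and exits with status 1
     *     }
     * }
     * }</pre>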
*/
@SuppressWarnings("unchecked")
public static void start(CommandLineProgram clp, String[] args) throws Exception {
try {
// setup our log layout
PatternLayout layout = new PatternLayout();
Logger logger = CommandLineUtils.getStingLogger();
// now set the layout of all the loggers to our layout
CommandLineUtils.setLayout(logger, layout);
// Initialize the logger using the defaults.
clp.setupLoggerLevel(layout);
// setup the parser
ParsingEngine parser = clp.parser = new ParsingEngine(clp);
parser.addArgumentSource(clp.getClass());
// process the args
if (clp.canAddArgumentsDynamically()) {
// if the command-line program can toss in extra args, fetch them and reparse the arguments.
parser.parse(args);
// Allow invalid and missing required arguments to pass this validation step.
// - InvalidArgument in case these arguments are specified by plugins.
// - MissingRequiredArgument in case the user requested help. Handle that later, once we've
// determined the full complement of arguments.
parser.validate(EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument,
ParsingEngine.ValidationType.InvalidArgument));
parser.loadArgumentsIntoObject(clp);
// Initialize the logger using the loaded command line.
clp.setupLoggerLevel(layout);
Class[] argumentSources = clp.getArgumentSources();
for (Class argumentSource : argumentSources)
parser.addArgumentSource(clp.getArgumentSourceName(argumentSource), argumentSource);
parser.parse(args);
if (isHelpPresent(parser))
printHelpAndExit(clp, parser);
parser.validate();
} else {
parser.parse(args);
if (isHelpPresent(parser))
printHelpAndExit(clp, parser);
parser.validate();
parser.loadArgumentsIntoObject(clp);
// Initialize the logger using the loaded command line.
clp.setupLoggerLevel(layout);
}
// if they specify a log location, output our data there
if (clp.toFile != null) {
FileAppender appender;
try {
appender = new FileAppender(layout, clp.toFile, false);
logger.addAppender(appender);
} catch (IOException e) {
throw new RuntimeException("Unable to re-route log output to " + clp.toFile + " make sure the destination exists");
}
}
// regardless of what happens next, generate the header information
HelpFormatter.generateHeaderInformation(clp.getApplicationDetails(), args);
// call the execute
CommandLineProgram.result = clp.execute();
}
catch (ArgumentException e) {
clp.parser.printHelp(clp.getApplicationDetails());
// Rethrow the exception to exit with an error.
throw e;
}
}
/**
* Find fields in the object obj that look like command-line arguments, and put command-line
* arguments into them.
*
* @param obj Object to inspect for command line arguments.
*/
public void loadArgumentsIntoObject(Object obj) {
parser.loadArgumentsIntoObject(obj);
}
/**
* this function checks the logger level passed in on the command line, taking the lowest
* level that was provided.
* @param layout Pattern layout to format based on the logger level.
*/
@SuppressWarnings("unchecked")
private void setupLoggerLevel(PatternLayout layout) {
// if we're in debug mode, set the mode up
if (debugMode) {
layout.setConversionPattern(debugPatternString);
} else {
layout.setConversionPattern(patternString);
}
// set the default logger level
Level par;
if (logging_level.toUpperCase().equals("DEBUG")) {
par = Level.DEBUG;
} else if (logging_level.toUpperCase().equals("ERROR")) {
par = Level.ERROR;
} else if (logging_level.toUpperCase().equals("FATAL")) {
par = Level.FATAL;
} else if (logging_level.toUpperCase().equals("INFO")) {
par = Level.INFO;
} else if (logging_level.toUpperCase().equals("WARN")) {
par = Level.WARN;
} else if (logging_level.toUpperCase().equals("OFF")) {
par = Level.OFF;
} else {
// we don't understand the logging level, let's get out of here
throw new ArgumentException("Unable to match: " + logging_level + " to a logging level, make sure it's a valid level (INFO, DEBUG, ERROR, FATAL, OFF)");
}
Logger.getRootLogger().setLevel(par);
}
/**
     * prints pointers to the documentation; used when reporting an error in the command line tool
*/
private static void printDocumentationReference() {
errorPrintf("Visit our wiki for extensive documentation http://www.broadinstitute.org/gsa/wiki%n");
errorPrintf("Visit our forum to view answers to commonly asked questions http://getsatisfaction.com/gsa%n");
}
/**
* Do a cursory search for the given argument.
*
* @param parser Parser
*
* @return True if help is present; false otherwise.
*/
private static boolean isHelpPresent(ParsingEngine parser) {
return parser.isArgumentPresent("help");
}
/**
* Print help and exit.
*
* @param clp Instance of the command-line program.
     * @param parser Parsing engine, used to print the help message.
*/
private static void printHelpAndExit(CommandLineProgram clp, ParsingEngine parser) {
parser.printHelp(clp.getApplicationDetails());
System.exit(0);
}
private static void errorPrintf(String format, Object... s) {
String formatted = String.format(format, s);
if ( formatted.trim().equals("") )
System.err.println("##### ERROR");
else {
for ( String part : formatted.split("\n") ) {
System.err.println("##### ERROR " + part);
}
}
}
/**
     * used to indicate an error occurred
*
* @param msg the message
* @param e the error
*/
public static void exitSystemWithError(String msg, final Exception e) {
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("stack trace %n");
e.printStackTrace();
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("A GATK RUNTIME ERROR has occurred (version %s):%n", CommandLineGATK.getVersionNumber());
errorPrintf("%n");
errorPrintf("Please visit the wiki to see if this is a known problem%n");
errorPrintf("If not, please post the error, with stack trace, to the GATK forum%n");
printDocumentationReference();
if ( msg == null ) // some exceptions don't have detailed messages
msg = "Code exception (see stack trace for error itself)";
errorPrintf("%n");
errorPrintf("MESSAGE: %s%n", msg.trim());
errorPrintf("------------------------------------------------------------------------------------------%n");
System.exit(1);
}
public static void exitSystemWithUserError(final Exception e) {
if ( e.getMessage() == null )
throw new ReviewedStingException("UserException found with no message!", e);
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("A USER ERROR has occurred (version %s): %n", CommandLineGATK.getVersionNumber());
errorPrintf("The invalid arguments or inputs must be corrected before the GATK can proceed%n");
errorPrintf("Please do not post this error to the GATK forum%n");
errorPrintf("%n");
errorPrintf("See the documentation (rerun with -h) for this tool to view allowable command-line arguments.%n");
printDocumentationReference();
errorPrintf("%n");
errorPrintf("MESSAGE: %s%n", e.getMessage().trim());
errorPrintf("------------------------------------------------------------------------------------------%n");
System.exit(1);
}
public static void exitSystemWithSamError(final Exception e) {
if ( e.getMessage() == null )
throw new ReviewedStingException("SamException found with no message!", e);
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("A BAM ERROR has occurred (version %s): %n", CommandLineGATK.getVersionNumber());
errorPrintf("The invalid inputs must be corrected before the GATK can proceed%n");
errorPrintf("Please do not post this error to the GATK forum until you have followed the instructions below%n");
errorPrintf("%n");
errorPrintf("Please make sure that your BAM file is well-formed by running Picard's validator on it%n");
errorPrintf("(see http://picard.sourceforge.net/command-line-overview.shtml#ValidateSamFile for details)%n");
errorPrintf("Also, please ensure that your BAM index is not corrupted: delete the current one and regenerate it with 'samtools index'%n");
printDocumentationReference();
errorPrintf("%n");
errorPrintf("MESSAGE: %s%n", e.getMessage().trim());
errorPrintf("------------------------------------------------------------------------------------------%n");
System.exit(1);
}
/**
     * used to indicate an error occurred
     *
     * @param e the exception that occurred
*/
public static void exitSystemWithError(Exception e) {
exitSystemWithError(e.getMessage(), e);
}
/**
* A hack to ensure that numbers are always formatted in the US style.
*/
protected static void forceJVMLocaleToUSEnglish() {
Locale.setDefault(Locale.US);
}
}
| java/src/org/broadinstitute/sting/commandline/CommandLineProgram.java | /*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.commandline;
import org.apache.log4j.*;
import org.broadinstitute.sting.gatk.CommandLineGATK;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.help.ApplicationDetails;
import org.broadinstitute.sting.utils.help.HelpFormatter;
import java.io.IOException;
import java.util.*;
public abstract class CommandLineProgram {
    /** The engine responsible for parsing the command-line arguments supplied to this program. */
protected ParsingEngine parser = null;
/** the default log level */
@Argument(fullName = "logging_level",
shortName = "l",
doc = "Set the minimum level of logging, i.e. setting INFO get's you INFO up to FATAL, setting ERROR gets you ERROR and FATAL level logging.",
required = false)
protected String logging_level = "INFO";
/** where to send the output of our logger */
@Output(fullName = "log_to_file",
shortName = "log",
doc = "Set the logging location",
required = false)
protected String toFile = null;
/** do we want to silence the command line output */
@Argument(fullName = "quiet_output_mode",
shortName = "quiet",
doc = "Set the logging to quiet mode, no output to stdout",
required = false)
protected Boolean quietMode = false;
/** do we want to generate debugging information with the logs */
@Argument(fullName = "debug_mode",
shortName = "debug",
doc = "Set the logging file string to include a lot of debugging information (SLOW!)",
required = false)
protected Boolean debugMode = false;
/** this is used to indicate if they've asked for help */
@Argument(fullName = "help", shortName = "h", doc = "Generate this help message", required = false)
public Boolean help = false;
/** our logging output patterns */
private static String patternString = "%-5p %d{HH:mm:ss,SSS} %C{1} - %m %n";
private static String debugPatternString = "%n[level] %p%n[date]\t\t %d{dd MMM yyyy HH:mm:ss,SSS} %n[class]\t\t %C %n[location]\t %l %n[line number]\t %L %n[message]\t %m %n";
static {
/**
         * The very first thing that any Sting application does is force the JVM locale into US English, so that we don't have
* to think about number formatting issues.
*/
forceJVMLocaleToUSEnglish();
// setup a basic log configuration
CommandLineUtils.configureConsoleLogging();
}
/**
* Allows a given application to return a brief description of itself.
*
* @return An ApplicationDetails object describing the current application. Should not be null.
*/
protected ApplicationDetails getApplicationDetails() {
return new ApplicationDetails(ApplicationDetails.createDefaultHeader(getClass()),
Collections.<String>emptyList(),
ApplicationDetails.createDefaultRunningInstructions(getClass()),
null);
}
/**
* Subclasses of CommandLinePrograms can provide their own types of command-line arguments.
* @return A collection of type descriptors generating implementation-dependent placeholders.
*/
protected Collection<ArgumentTypeDescriptor> getArgumentTypeDescriptors() {
return Collections.emptyList();
}
/**
* Will this application want to vary its argument list dynamically?
* If so, parse the command-line options and then prompt the subclass to return
* a list of argument providers.
*
* @return Whether the application should vary command-line arguments dynamically.
*/
protected boolean canAddArgumentsDynamically() { return false; }
/**
     * Provide a list of objects to inspect, looking for additional command-line arguments.
*
* @return A list of objects to inspect.
*/
protected Class[] getArgumentSources() {
return new Class[]{};
}
/**
* Name this argument source. Provides the (full) class name as a default.
*
* @param source The argument source.
*
* @return a name for the argument source.
*/
protected String getArgumentSourceName( Class source ) { return source.toString(); }
/**
* Sets the command-line parsing engine. Necessary for unit testing purposes.
* @param parser the new command-line parsing engine
*/
public void setParser( ParsingEngine parser ) {
this.parser = parser;
}
/**
* this is the function that the inheriting class can expect to have called
* when all the argument processing is done
*
* @return the return code to exit the program with
* @throws Exception when an exception occurs
*/
protected abstract int execute() throws Exception;
public static int result = -1;
/**
* This function is called to start processing the command line, and kick
     * off the execute method of the program.
*
* @param clp the command line program to execute
* @param args the command line arguments passed in
* @throws Exception when an exception occurs
*/
@SuppressWarnings("unchecked")
public static void start(CommandLineProgram clp, String[] args) throws Exception {
try {
// setup our log layout
PatternLayout layout = new PatternLayout();
Logger logger = CommandLineUtils.getStingLogger();
// now set the layout of all the loggers to our layout
CommandLineUtils.setLayout(logger, layout);
// Initialize the logger using the defaults.
clp.setupLoggerLevel(layout);
// setup the parser
ParsingEngine parser = clp.parser = new ParsingEngine(clp);
parser.addArgumentSource(clp.getClass());
// process the args
if (clp.canAddArgumentsDynamically()) {
// if the command-line program can toss in extra args, fetch them and reparse the arguments.
parser.parse(args);
// Allow invalid and missing required arguments to pass this validation step.
// - InvalidArgument in case these arguments are specified by plugins.
// - MissingRequiredArgument in case the user requested help. Handle that later, once we've
// determined the full complement of arguments.
parser.validate(EnumSet.of(ParsingEngine.ValidationType.MissingRequiredArgument,
ParsingEngine.ValidationType.InvalidArgument));
parser.loadArgumentsIntoObject(clp);
// Initialize the logger using the loaded command line.
clp.setupLoggerLevel(layout);
Class[] argumentSources = clp.getArgumentSources();
for (Class argumentSource : argumentSources)
parser.addArgumentSource(clp.getArgumentSourceName(argumentSource), argumentSource);
parser.parse(args);
if (isHelpPresent(parser))
printHelpAndExit(clp, parser);
parser.validate();
} else {
parser.parse(args);
if (isHelpPresent(parser))
printHelpAndExit(clp, parser);
parser.validate();
parser.loadArgumentsIntoObject(clp);
// Initialize the logger using the loaded command line.
clp.setupLoggerLevel(layout);
}
// if they set the mode to quiet
if (clp.quietMode) {
// the only appender we should have is stdout, the following meathod is
// deprecated, but the standard remove all appenders doesn't seem to work
// TODO: find the right function
//Category root = Category.getRoot();
//root.removeAllAppenders();
//logger.removeAllAppenders();
}
// if they specify a log location, output our data there
if (clp.toFile != null) {
FileAppender appender;
try {
appender = new FileAppender(layout, clp.toFile, false);
logger.addAppender(appender);
} catch (IOException e) {
throw new RuntimeException("Unable to re-route log output to " + clp.toFile + " make sure the destination exists");
}
}
// regardless of what happens next, generate the header information
HelpFormatter.generateHeaderInformation(clp.getApplicationDetails(), args);
// call the execute
CommandLineProgram.result = clp.execute();
}
catch (ArgumentException e) {
clp.parser.printHelp(clp.getApplicationDetails());
// Rethrow the exception to exit with an error.
throw e;
}
}
/**
* Find fields in the object obj that look like command-line arguments, and put command-line
* arguments into them.
*
* @param obj Object to inspect for command line arguments.
*/
public void loadArgumentsIntoObject(Object obj) {
parser.loadArgumentsIntoObject(obj);
}
/**
* this function checks the logger level passed in on the command line, taking the lowest
* level that was provided.
* @param layout Pattern layout to format based on the logger level.
*/
@SuppressWarnings("unchecked")
private void setupLoggerLevel(PatternLayout layout) {
// if we're in debug mode, set the mode up
if (debugMode) {
layout.setConversionPattern(debugPatternString);
} else {
layout.setConversionPattern(patternString);
}
// set the default logger level
Level par;
if (logging_level.toUpperCase().equals("DEBUG")) {
par = Level.DEBUG;
} else if (logging_level.toUpperCase().equals("ERROR")) {
par = Level.ERROR;
} else if (logging_level.toUpperCase().equals("FATAL")) {
par = Level.FATAL;
} else if (logging_level.toUpperCase().equals("INFO")) {
par = Level.INFO;
} else if (logging_level.toUpperCase().equals("WARN")) {
par = Level.WARN;
} else if (logging_level.toUpperCase().equals("OFF")) {
par = Level.OFF;
} else {
// we don't understand the logging level, let's get out of here
throw new ArgumentException("Unable to match: " + logging_level + " to a logging level, make sure it's a valid level (INFO, DEBUG, ERROR, FATAL, OFF)");
}
Logger.getRootLogger().setLevel(par);
}
/**
     * prints pointers to the documentation; used when reporting an error in the command line tool
*/
private static void printDocumentationReference() {
errorPrintf("Visit our wiki for extensive documentation http://www.broadinstitute.org/gsa/wiki%n");
errorPrintf("Visit our forum to view answers to commonly asked questions http://getsatisfaction.com/gsa%n");
}
/**
* Do a cursory search for the given argument.
*
* @param parser Parser
*
* @return True if help is present; false otherwise.
*/
private static boolean isHelpPresent(ParsingEngine parser) {
return parser.isArgumentPresent("help");
}
/**
* Print help and exit.
*
* @param clp Instance of the command-line program.
     * @param parser Parsing engine, used to print the help message.
*/
private static void printHelpAndExit(CommandLineProgram clp, ParsingEngine parser) {
parser.printHelp(clp.getApplicationDetails());
System.exit(0);
}
private static void errorPrintf(String format, Object... s) {
String formatted = String.format(format, s);
if ( formatted.trim().equals("") )
System.err.println("##### ERROR");
else {
for ( String part : formatted.split("\n") ) {
System.err.println("##### ERROR " + part);
}
}
}
/**
     * used to indicate an error occurred
*
* @param msg the message
* @param e the error
*/
public static void exitSystemWithError(String msg, final Exception e) {
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("stack trace %n");
e.printStackTrace();
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("A GATK RUNTIME ERROR has occurred (version %s):%n", CommandLineGATK.getVersionNumber());
errorPrintf("%n");
errorPrintf("Please visit the wiki to see if this is a known problem%n");
errorPrintf("If not, please post the error, with stack trace, to the GATK forum%n");
printDocumentationReference();
if ( msg == null ) // some exceptions don't have detailed messages
msg = "Code exception (see stack trace for error itself)";
errorPrintf("%n");
errorPrintf("MESSAGE: %s%n", msg.trim());
errorPrintf("------------------------------------------------------------------------------------------%n");
System.exit(1);
}
public static void exitSystemWithUserError(final Exception e) {
if ( e.getMessage() == null )
throw new ReviewedStingException("UserException found with no message!", e);
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("A USER ERROR has occurred (version %s): %n", CommandLineGATK.getVersionNumber());
errorPrintf("The invalid arguments or inputs must be corrected before the GATK can proceed%n");
errorPrintf("Please do not post this error to the GATK forum%n");
errorPrintf("%n");
errorPrintf("See the documentation (rerun with -h) for this tool to view allowable command-line arguments.%n");
printDocumentationReference();
errorPrintf("%n");
errorPrintf("MESSAGE: %s%n", e.getMessage().trim());
errorPrintf("------------------------------------------------------------------------------------------%n");
System.exit(1);
}
public static void exitSystemWithSamError(final Exception e) {
if ( e.getMessage() == null )
throw new ReviewedStingException("SamException found with no message!", e);
errorPrintf("------------------------------------------------------------------------------------------%n");
errorPrintf("A BAM ERROR has occurred (version %s): %n", CommandLineGATK.getVersionNumber());
errorPrintf("The invalid inputs must be corrected before the GATK can proceed%n");
errorPrintf("Please do not post this error to the GATK forum until you have followed the instructions below%n");
errorPrintf("%n");
errorPrintf("Please make sure that your BAM file is well-formed by running Picard's validator on it%n");
errorPrintf("(see http://picard.sourceforge.net/command-line-overview.shtml#ValidateSamFile for details)%n");
errorPrintf("Also, please ensure that your BAM index is not corrupted: delete the current one and regenerate it with 'samtools index'%n");
printDocumentationReference();
errorPrintf("%n");
errorPrintf("MESSAGE: %s%n", e.getMessage().trim());
errorPrintf("------------------------------------------------------------------------------------------%n");
System.exit(1);
}
/**
     * used to indicate an error occurred
     *
     * @param e the exception that occurred
*/
public static void exitSystemWithError(Exception e) {
exitSystemWithError(e.getMessage(), e);
}
/**
* A hack to ensure that numbers are always formatted in the US style.
*/
protected static void forceJVMLocaleToUSEnglish() {
Locale.setDefault(Locale.US);
}
}
| Killed quiet mode. Should probably kill debugMode as well, but Queue's using
it. Will check with Khalid tomorrow.
git-svn-id: 4561c0a8f080806b19201efb9525134c00b76d40@5695 348d0f76-0448-11de-a6fe-93d51630548a
| java/src/org/broadinstitute/sting/commandline/CommandLineProgram.java | Killed quiet mode. Should probably kill debugMode as well, but Queue's using it. Will check with Khalid tomorrow. | <ide><path>ava/src/org/broadinstitute/sting/commandline/CommandLineProgram.java
<ide> required = false)
<ide> protected String toFile = null;
<ide>
<del> /** do we want to silence the command line output */
<del> @Argument(fullName = "quiet_output_mode",
<del> shortName = "quiet",
<del> doc = "Set the logging to quiet mode, no output to stdout",
<del> required = false)
<del> protected Boolean quietMode = false;
<del>
<ide> /** do we want to generate debugging information with the logs */
<ide> @Argument(fullName = "debug_mode",
<ide> shortName = "debug",
<ide>
<ide> // Initialize the logger using the loaded command line.
<ide> clp.setupLoggerLevel(layout);
<del> }
<del>
<del> // if they set the mode to quiet
<del> if (clp.quietMode) {
<del>
<del> // the only appender we should have is stdout, the following meathod is
<del> // deprecated, but the standard remove all appenders doesn't seem to work
<del> // TODO: find the right function
<del> //Category root = Category.getRoot();
<del> //root.removeAllAppenders();
<del> //logger.removeAllAppenders();
<ide> }
<ide>
<ide> // if they specify a log location, output our data there |
|
Java | apache-2.0 | f18bc31123e29520c81bef08cfbbe182a3ee8723 | 0 | gbif/dwca-io | package org.gbif.dwca.io;
/*
* Copyright 2011 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.gbif.api.model.registry.Dataset;
import org.gbif.dwc.terms.DcTerm;
import org.gbif.dwc.terms.DwcTerm;
import org.gbif.dwc.terms.Term;
import org.gbif.dwca.record.Record;
import org.gbif.io.TabWriter;
import org.gbif.registry.metadata.EMLWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.*;
import java.util.*;
/**
* Simple writer class to create valid dwc archives using tab data files.
* The meta.xml descriptor is generated automatically and an optional EML metadata document can be added.
* The archive is NOT compressed but the final product is a directory with all the necessary files.
* For usage of this class please @see DwcaWriterTest.
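 * <p>
 * Illustrative usage sketch (the row type, terms and values below are arbitrary examples, not
 * taken from DwcaWriterTest; {@code archiveDir} is assumed to be an existing java.io.File directory):
 * <pre>{@code
 * DwcaWriter writer = new DwcaWriter(DwcTerm.Taxon, DwcTerm.taxonID, archiveDir, true);
 * writer.newRecord("tax-1");
 * writer.addCoreColumn(DwcTerm.scientificName, "Abies alba Mill.");
 * writer.addCoreColumn(DwcTerm.taxonRank, "species");
 * writer.close(); // flushes the last record and writes meta.xml (plus eml.xml if metadata was set)
 * }</pre>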
*/
public class DwcaWriter {
private Logger log = LoggerFactory.getLogger(DwcaWriter.class);
private final File dir;
private final boolean useHeaders;
private long recordNum;
private String coreId;
private Map<Term, String> coreRow;
private final Term coreRowType;
private final Term coreIdTerm;
private final Map<Term, TabWriter> writers = Maps.newHashMap();
private final Set<Term> headersOut = Sets.newHashSet();
private final Map<Term, String> dataFileNames = Maps.newHashMap();
// key=rowType, value=columns
private final Map<Term, List<Term>> terms = Maps.newHashMap();
// key=rowType, value=default values per column
private final Map<Term, Map<Term, String>> defaultValues = Maps.newHashMap();
private final Map<Term, Map<Term, String>> multiValueDelimiter = Maps.newHashMap();
private Dataset eml;
private Map<String, Dataset> constituents = Maps.newHashMap();
/**
* Creates a new writer without header rows.
* @param coreRowType the core row type.
* @param dir the directory to create the archive in.
*/
public DwcaWriter(Term coreRowType, File dir) throws IOException {
this(coreRowType, dir, false);
}
/**
* If headers are used the first record must include all terms ever used for that file.
* If in subsequent rows additional terms are introduced an IllegalArgumentException is thrown.
*
* @param coreRowType the core row type
* @param dir the directory to create the archive in
* @param useHeaders if true the first row in every data file will include headers
*/
public DwcaWriter(Term coreRowType, File dir, boolean useHeaders) throws IOException {
this(coreRowType, null, dir, useHeaders);
}
/**
* If headers are used the first record must include all terms ever used for that file.
* If in subsequent rows additional terms are introduced an IllegalArgumentException is thrown.
*
* @param coreRowType the core row type
* @param coreIdTerm the term of the id column
* @param dir the directory to create the archive in
* @param useHeaders if true the first row in every data file will include headers
*/
public DwcaWriter(Term coreRowType, Term coreIdTerm, File dir, boolean useHeaders) throws IOException {
this.dir = dir;
this.coreRowType = coreRowType;
this.coreIdTerm = coreIdTerm;
this.useHeaders = useHeaders;
addRowType(coreRowType);
}
public static Map<Term, String> recordToMap(Record rec, ArchiveFile af) {
Map<Term, String> map = new HashMap<Term, String>();
for (Term t : af.getTerms()) {
map.put(t, rec.value(t));
}
return map;
}
public static String dataFileName(Term rowType) {
return rowType.simpleName().toLowerCase() + ".txt";
}
private void addRowType(Term rowType) throws IOException {
terms.put(rowType, new ArrayList<Term>());
String dfn = dataFileName(rowType);
dataFileNames.put(rowType, dfn);
File df = new File(dir, dfn);
FileUtils.forceMkdir(df.getParentFile());
OutputStream out = new FileOutputStream(df);
TabWriter wr = new TabWriter(out);
writers.put(rowType, wr);
}
/**
* A new core record is started and the last core and all extension records are written.
* @param id the new records id
* @throws IOException
*/
public void newRecord(String id) throws IOException {
// flush last record
flushLastCoreRecord();
// start new
recordNum++;
coreId = id;
coreRow = new HashMap<Term, String>();
}
private void flushLastCoreRecord() throws IOException {
if (coreRow != null) {
writeRow(coreRow, coreRowType);
}
}
public long getRecordsWritten() {
return recordNum;
}
private void writeRow(Map<Term, String> rowMap, Term rowType) throws IOException {
TabWriter writer = writers.get(rowType);
List<Term> columns = terms.get(rowType);
if (useHeaders && !headersOut.contains(rowType)){
// write header row
writeHeader(writer, rowType, columns);
}
// make sure coreId is not null for extensions
if (coreRowType != rowType && coreId == null){
log.warn("Adding an {} extension record to a core without an Id! Skip this record", rowType);
} else {
String[] row = new String[columns.size() + 1];
row[0] = coreId;
for (Map.Entry<Term, String> conceptTermStringEntry : rowMap.entrySet()) {
int column = 1 + columns.indexOf(conceptTermStringEntry.getKey());
row[column] = conceptTermStringEntry.getValue();
}
writer.write(row);
}
}
private void writeHeader(TabWriter writer, Term rowType, List<Term> columns) throws IOException {
int idx = 0;
String[] row = new String[columns.size() + 1];
Term idTerm;
if (DwcTerm.Taxon == coreRowType){
idTerm = DwcTerm.taxonID;
} else if (DwcTerm.Occurrence == coreRowType){
idTerm = DwcTerm.occurrenceID;
} else if (DwcTerm.Identification == coreRowType){
idTerm = DwcTerm.identificationID;
} else if (DwcTerm.Event == coreRowType){
idTerm = DwcTerm.eventID;
} else {
// default to generic dc identifier for id column
idTerm = DcTerm.identifier;
}
row[idx] = idTerm.simpleName();
for (Term term : columns) {
idx ++;
row[idx] = term.simpleName();
}
writer.write(row);
headersOut.add(rowType);
}
/**
* Add a single value for the current core record.
* Calling this method requires that #newRecord() has been called at least once,
* otherwise an IllegalStateException is thrown.
* @param term
* @param value
*/
public void addCoreColumn(Term term, String value) {
// ensure we do not overwrite the coreIdTerm if one is defined
if (coreIdTerm != null && coreIdTerm.equals(term)) {
throw new IllegalStateException("You cannot add a term that was specified as coreId term");
}
List<Term> coreTerms = terms.get(coreRowType);
if (!coreTerms.contains(term)) {
if (useHeaders && recordNum>1){
throw new IllegalStateException("You cannot add new terms after the first row when headers are enabled");
}
coreTerms.add(term);
}
try {
coreRow.put(term, value);
} catch (NullPointerException e) {
// no core record has been started yet
throw new IllegalStateException("No core record has been created yet. Call newRecord() at least once");
}
}
/**
* Convenience method to add an empty core column.
*/
public void addCoreColumn(Term term) {
addCoreColumn(term, (String) null);
}
/**
* Null safe convenience method to write integers.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Integer value) {
addCoreColumn(term, value == null ? null : value.toString());
}
/**
* Null safe convenience method to write booleans.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Boolean value) {
addCoreColumn(term, value == null ? null : value.toString());
}
/**
* Null safe convenience method to write enumeration values.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Enum value) {
addCoreColumn(term, value == null ? null : value.name().toLowerCase().replaceAll("_", " "));
}
/**
* Null safe convenience method to write object values using the toString method.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Object value) {
addCoreColumn(term, value == null ? null : value.toString());
}
/**
* Add a default value to a term of the core.
*
* @param term
* @param defaultValue
*/
public void addCoreDefaultValue(Term term, String defaultValue){
addDefaultValue(coreRowType, term, defaultValue);
}
/**
* Add a default value to a term of the provided rowType.
*
* @param rowType
* @param term
* @param defaultValue
*/
public void addDefaultValue(Term rowType, Term term, String defaultValue){
if(!defaultValues.containsKey(rowType)){
defaultValues.put(rowType, new HashMap<Term, String>());
}
Map<Term,String> currentDefaultValues= defaultValues.get(rowType);
if(currentDefaultValues.containsKey(term)){
throw new IllegalStateException("The default value of term "+ term + " is already defined");
}
currentDefaultValues.put(term, defaultValue);
}
/**
* Declares the multi value delimiter for a term of the core rowType.
*
   * @param term
   * @param delimiter
   */
  public void addCoreMultiValueDelimiter(Term term, String delimiter){
    addMultiValueDelimiter(coreRowType, term, delimiter);
}
/**
* Declares the multi value delimiter for a term of the provided rowType.
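   * <p>
   * Illustrative sketch (the term and delimiter are arbitrary examples): declaring
   * {@code writer.addCoreMultiValueDelimiter(DwcTerm.associatedMedia, "|")} should make the
   * generated meta.xml record that column as pipe-delimited via the field's delimitedBy
   * attribute; concatenating the individual values with the delimiter remains the caller's job.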
*/
public void addMultiValueDelimiter(Term rowType, Term term, String delimiter){
if(!multiValueDelimiter.containsKey(rowType)){
multiValueDelimiter.put(rowType, new HashMap<Term, String>());
}
    Map<Term,String> delimiters = multiValueDelimiter.get(rowType);
if(delimiters.containsKey(term)){
throw new IllegalStateException("The delimiter of term "+ term + " is already defined");
}
delimiters.put(term, delimiter);
}
/**
* @return new map of all current data file names by their rowTypes.
*/
public Map<Term, String> getDataFiles() {
return Maps.newHashMap(dataFileNames);
}
/**
* Add an extension record associated with the current core record.
*
* @param rowType
* @param row
* @throws IOException
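   *
   * <p>Illustrative sketch (the extension row type and term are arbitrary examples):
   * <pre>{@code
   * Map<Term, String> ext = new HashMap<Term, String>();
   * ext.put(DcTerm.bibliographicCitation, "Mill. 1768. The Gardeners Dictionary");
   * writer.addExtensionRecord(GbifTerm.Reference, ext);
   * }</pre>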
*/
public void addExtensionRecord(Term rowType, Map<Term, String> row) throws IOException {
// make sure we know the extension rowtype
if (!terms.containsKey(rowType)) {
addRowType(rowType);
}
// make sure we know all terms
List<Term> knownTerms = terms.get(rowType);
final boolean isFirst = knownTerms.isEmpty();
for (Term term : row.keySet()) {
if (!knownTerms.contains(term)) {
if (useHeaders && !isFirst){
throw new IllegalStateException("You cannot add new terms after the first row when headers are enabled");
}
knownTerms.add(term);
}
}
// write extension record
writeRow(row, rowType);
}
public void setEml(Dataset eml) {
this.eml = eml;
}
/**
* Adds a constituent dataset using the dataset key as the datasetID
*/
public void addConstituent(Dataset eml) {
addConstituent(eml.getKey().toString(), eml);
}
/**
* Adds a constituent dataset.
   * The eml file will be named after the datasetID, which has to be unique.
*/
public void addConstituent(String datasetID, Dataset eml) {
this.constituents.put(datasetID, eml);
}
/**
* @return the set of available rowTypes in this archive
*/
public Set<Term> getRowTypes() {
return terms.keySet();
}
/**
* @return the list of term columns as used for the given row type
*/
public List<Term> getTerms(Term rowType) {
if (terms.containsKey(rowType)) {
return terms.get(rowType);
}
return Lists.newArrayList();
}
/**
* Writes meta.xml and eml.xml to the archive and closes tab writers.
*
*/
public void close() throws IOException {
addEml();
addConstituents();
addMeta();
// flush last record
flushLastCoreRecord();
// TODO: add missing columns in second iteration of data files
// close writers
for (TabWriter w : writers.values()) {
w.close();
}
}
protected static void writeEml(Dataset d, File f) throws IOException {
if (d != null) {
try (Writer writer = new FileWriter(f)){
EMLWriter.newInstance().writeTo(d, writer);
}
}
}
private void addEml() throws IOException {
writeEml(eml, new File(dir, "eml.xml"));
}
private void addConstituents() throws IOException {
if (!constituents.isEmpty()) {
File ddir = new File(dir, Archive.CONSTITUENT_DIR);
ddir.mkdirs();
for (Map.Entry<String, Dataset> de : constituents.entrySet()) {
writeEml(de.getValue(), new File(ddir, de.getKey()+".xml"));
}
}
}
private void addMeta() throws IOException {
File metaFile = new File(dir, Archive.META_FN);
Archive arch = new Archive();
if (eml != null) {
arch.setMetadataLocation("eml.xml");
}
arch.setCore(buildArchiveFile(arch, coreRowType, coreIdTerm));
for (Term rowType : this.terms.keySet()) {
if (!coreRowType.equals(rowType)) {
arch.addExtension(buildArchiveFile(arch, rowType, null));
}
}
MetaDescriptorWriter.writeMetaFile(metaFile, arch);
}
/**
* Build an ArchiveFile for core or extension(s).
*
* @param archive
* @param rowType
* @param idTerm the term of the id column, may be null
* @return
*/
private ArchiveFile buildArchiveFile(Archive archive, Term rowType, Term idTerm) {
ArchiveFile af = ArchiveFile.buildTabFile();
af.setArchive(archive);
af.addLocation(dataFileNames.get(rowType));
af.setEncoding("utf-8");
af.setIgnoreHeaderLines(useHeaders ? 1 : 0);
af.setRowType(rowType);
ArchiveField id = new ArchiveField();
id.setIndex(0);
af.setId(id);
// always use the index 0 for idTerm
if (idTerm != null) {
af.addField(buildArchiveField(0, idTerm));
}
Map<Term,String> termDefaultValueMap = defaultValues.get(rowType);
Map<Term,String> termMultiValueDelimiterMap = multiValueDelimiter.get(rowType);
List<Term> rowTypeTerms = terms.get(rowType);
int idx = 0;
String defaultValue;
String mvDelim;
for (Term c : rowTypeTerms) {
idx++;
defaultValue = (termDefaultValueMap !=null ? termDefaultValueMap.get(c) : null);
mvDelim = (termMultiValueDelimiterMap !=null ? termMultiValueDelimiterMap.get(c) : null);
af.addField(buildArchiveField(idx, c, defaultValue, mvDelim));
}
// check if default values are provided for this rowType
if(termDefaultValueMap != null){
for (Term t : termDefaultValueMap.keySet()) {
if(!rowTypeTerms.contains(t)){
af.addField(buildArchiveFieldDefaultValue(t, termDefaultValueMap.get(t)));
}
}
}
return af;
}
/**
* Build an ArchiveField with a defaultValue and no index.
*
* @param term
* @param defaultValue
* @return
*/
private ArchiveField buildArchiveFieldDefaultValue(Term term, String defaultValue){
Preconditions.checkNotNull(term, "Can't use a null term");
Preconditions.checkNotNull(defaultValue, "Can't use a null defaultValue");
return new ArchiveField(term, defaultValue);
}
/**
* Build an ArchiveField with no defaultValue.
*
* @param idx
* @param term
* @return
*/
private ArchiveField buildArchiveField(Integer idx, Term term){
return buildArchiveField(idx, term, null);
}
/**
*
* Build an ArchiveField from optional parameters.
*
   * @param idx column index; required (must not be null)
   * @param term term; required (must not be null)
* @param defaultValue default value or null
* @return
*/
private ArchiveField buildArchiveField(Integer idx, Term term, String defaultValue){
return buildArchiveField(idx, term, defaultValue, null);
}
/**
*
* Build an ArchiveField from optional parameters.
*
   * @param idx column index; required (must not be null)
   * @param term term; required (must not be null)
* @param defaultValue default value or null
* @param multiValueDelimiter value delimiter or null
*/
private ArchiveField buildArchiveField(Integer idx, Term term, String defaultValue, String multiValueDelimiter){
Preconditions.checkNotNull(idx, "Can't use a null index");
Preconditions.checkNotNull(term, "Can't use a null term");
ArchiveField field = new ArchiveField(idx, term);
if (StringUtils.isNotBlank(defaultValue)){
field.setDefaultValue(defaultValue);
}
if (StringUtils.isNotEmpty(multiValueDelimiter)){
field.setDelimitedBy(multiValueDelimiter);
}
return field;
}
}
| src/main/java/org/gbif/dwca/io/DwcaWriter.java | package org.gbif.dwca.io;
/*
* Copyright 2011 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.gbif.api.model.registry.Dataset;
import org.gbif.dwc.terms.DcTerm;
import org.gbif.dwc.terms.DwcTerm;
import org.gbif.dwc.terms.Term;
import org.gbif.dwca.record.Record;
import org.gbif.io.TabWriter;
import org.gbif.registry.metadata.EMLWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.Closeables;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Simple writer class to create valid dwc archives using tab data files.
* The meta.xml descriptor is generated automatically and an optional EML metadata document can be added.
* The archive is NOT compressed but the final product is a directory with all the necessary files.
* For usage of this class please @see DwcaWriterTest.
*/
public class DwcaWriter {
private Logger log = LoggerFactory.getLogger(DwcaWriter.class);
private final File dir;
private final boolean useHeaders;
private long recordNum;
private String coreId;
private Map<Term, String> coreRow;
private final Term coreRowType;
private final Term coreIdTerm;
private final Map<Term, TabWriter> writers = Maps.newHashMap();
private final Set<Term> headersOut = Sets.newHashSet();
private final Map<Term, String> dataFileNames = Maps.newHashMap();
// key=rowType, value=columns
private final Map<Term, List<Term>> terms = Maps.newHashMap();
// key=rowType, value=default values per column
private final Map<Term, Map<Term, String>> defaultValues = Maps.newHashMap();
private Dataset eml;
private Map<String, Dataset> constituents = Maps.newHashMap();
/**
* Creates a new writer without header rows.
* @param coreRowType the core row type.
* @param dir the directory to create the archive in.
*/
public DwcaWriter(Term coreRowType, File dir) throws IOException {
this(coreRowType, dir, false);
}
/**
* If headers are used the first record must include all terms ever used for that file.
* If in subsequent rows additional terms are introduced an IllegalArgumentException is thrown.
*
* @param coreRowType the core row type
* @param dir the directory to create the archive in
* @param useHeaders if true the first row in every data file will include headers
*/
public DwcaWriter(Term coreRowType, File dir, boolean useHeaders) throws IOException {
this(coreRowType, null, dir, useHeaders);
}
/**
* If headers are used the first record must include all terms ever used for that file.
* If in subsequent rows additional terms are introduced an IllegalArgumentException is thrown.
*
* @param coreRowType the core row type
* @param coreIdTerm the term of the id column
* @param dir the directory to create the archive in
* @param useHeaders if true the first row in every data file will include headers
*/
public DwcaWriter(Term coreRowType, Term coreIdTerm, File dir, boolean useHeaders) throws IOException {
this.dir = dir;
this.coreRowType = coreRowType;
this.coreIdTerm = coreIdTerm;
this.useHeaders = useHeaders;
addRowType(coreRowType);
}
public static Map<Term, String> recordToMap(Record rec, ArchiveFile af) {
Map<Term, String> map = new HashMap<Term, String>();
for (Term t : af.getTerms()) {
map.put(t, rec.value(t));
}
return map;
}
public static String dataFileName(Term rowType) {
return rowType.simpleName().toLowerCase() + ".txt";
}
private void addRowType(Term rowType) throws IOException {
terms.put(rowType, new ArrayList<Term>());
String dfn = dataFileName(rowType);
dataFileNames.put(rowType, dfn);
File df = new File(dir, dfn);
FileUtils.forceMkdir(df.getParentFile());
OutputStream out = new FileOutputStream(df);
TabWriter wr = new TabWriter(out);
writers.put(rowType, wr);
}
/**
* A new core record is started and the last core and all extension records are written.
* @param id the new records id
* @throws IOException
*/
public void newRecord(String id) throws IOException {
// flush last record
flushLastCoreRecord();
// start new
recordNum++;
coreId = id;
coreRow = new HashMap<Term, String>();
}
private void flushLastCoreRecord() throws IOException {
if (coreRow != null) {
writeRow(coreRow, coreRowType);
}
}
public long getRecordsWritten() {
return recordNum;
}
private void writeRow(Map<Term, String> rowMap, Term rowType) throws IOException {
TabWriter writer = writers.get(rowType);
List<Term> columns = terms.get(rowType);
if (useHeaders && !headersOut.contains(rowType)){
// write header row
writeHeader(writer, rowType, columns);
}
// make sure coreId is not null for extensions
if (coreRowType != rowType && coreId == null){
log.warn("Adding an {} extension record to a core without an Id! Skip this record", rowType);
} else {
String[] row = new String[columns.size() + 1];
row[0] = coreId;
for (Map.Entry<Term, String> conceptTermStringEntry : rowMap.entrySet()) {
int column = 1 + columns.indexOf(conceptTermStringEntry.getKey());
row[column] = conceptTermStringEntry.getValue();
}
writer.write(row);
}
}
private void writeHeader(TabWriter writer, Term rowType, List<Term> columns) throws IOException {
int idx = 0;
String[] row = new String[columns.size() + 1];
Term idTerm;
if (DwcTerm.Taxon == coreRowType){
idTerm = DwcTerm.taxonID;
} else if (DwcTerm.Occurrence == coreRowType){
idTerm = DwcTerm.occurrenceID;
} else if (DwcTerm.Identification == coreRowType){
idTerm = DwcTerm.identificationID;
} else if (DwcTerm.Event == coreRowType){
idTerm = DwcTerm.eventID;
} else {
// default to generic dc identifier for id column
idTerm = DcTerm.identifier;
}
row[idx] = idTerm.simpleName();
for (Term term : columns) {
idx ++;
row[idx] = term.simpleName();
}
writer.write(row);
headersOut.add(rowType);
}
/**
* Add a single value for the current core record.
* Calling this method requires that #newRecord() has been called at least once,
* otherwise an IllegalStateException is thrown.
* @param term
* @param value
*/
public void addCoreColumn(Term term, String value) {
// ensure we do not overwrite the coreIdTerm if one is defined
if (coreIdTerm != null && coreIdTerm.equals(term)) {
throw new IllegalStateException("You cannot add a term that was specified as coreId term");
}
List<Term> coreTerms = terms.get(coreRowType);
if (!coreTerms.contains(term)) {
if (useHeaders && recordNum>1){
throw new IllegalStateException("You cannot add new terms after the first row when headers are enabled");
}
coreTerms.add(term);
}
try {
coreRow.put(term, value);
} catch (NullPointerException e) {
// no core record has been started yet
throw new IllegalStateException("No core record has been created yet. Call newRecord() at least once");
}
}
/**
* Convenience method to add an empty core column.
*/
public void addCoreColumn(Term term) {
addCoreColumn(term, (String) null);
}
/**
* Null safe convenience method to write integers.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Integer value) {
addCoreColumn(term, value == null ? null : value.toString());
}
/**
* Null safe convenience method to write booleans.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Boolean value) {
addCoreColumn(term, value == null ? null : value.toString());
}
/**
* Null safe convenience method to write enumeration values.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Enum value) {
addCoreColumn(term, value == null ? null : value.name().toLowerCase().replaceAll("_", " "));
}
/**
* Null safe convenience method to write object values using the toString method.
* See addCoreColumn(Term term, String value) for docs
*/
public void addCoreColumn(Term term, @Nullable Object value) {
addCoreColumn(term, value == null ? null : value.toString());
}
/**
* Add a default value to a term of the core.
*
* @param term
* @param defaultValue
*/
public void addCoreDefaultValue(Term term, String defaultValue){
addDefaultValue(coreRowType, term, defaultValue);
}
/**
* Add a default value to a term of the provided rowType.
*
* @param rowType
* @param term
* @param defaultValue
*/
public void addDefaultValue(Term rowType, Term term, String defaultValue){
if(!defaultValues.containsKey(rowType)){
defaultValues.put(rowType, new HashMap<Term, String>());
}
Map<Term,String> currentDefaultValues= defaultValues.get(rowType);
if(currentDefaultValues.containsKey(term)){
throw new IllegalStateException("The default value of term "+ term + " is already defined");
}
currentDefaultValues.put(term, defaultValue);
}
/**
* @return new map of all current data file names by their rowTypes.
*/
public Map<Term, String> getDataFiles() {
return Maps.newHashMap(dataFileNames);
}
/**
* Add an extension record associated with the current core record.
*
* @param rowType
* @param row
* @throws IOException
*/
public void addExtensionRecord(Term rowType, Map<Term, String> row) throws IOException {
// make sure we know the extension rowtype
if (!terms.containsKey(rowType)) {
addRowType(rowType);
}
// make sure we know all terms
List<Term> knownTerms = terms.get(rowType);
final boolean isFirst = knownTerms.isEmpty();
for (Term term : row.keySet()) {
if (!knownTerms.contains(term)) {
if (useHeaders && !isFirst){
throw new IllegalStateException("You cannot add new terms after the first row when headers are enabled");
}
knownTerms.add(term);
}
}
// write extension record
writeRow(row, rowType);
}
public void setEml(Dataset eml) {
this.eml = eml;
}
/**
* Adds a constituent dataset using the dataset key as the datasetID
*/
public void addConstituent(Dataset eml) {
addConstituent(eml.getKey().toString(), eml);
}
/**
* Adds a constituent dataset.
   * The eml file will be named after the datasetID, which has to be unique.
*/
public void addConstituent(String datasetID, Dataset eml) {
this.constituents.put(datasetID, eml);
}
/**
* @return the set of available rowTypes in this archive
*/
public Set<Term> getRowTypes() {
return terms.keySet();
}
/**
* @return the list of term columns as used for the given row type
*/
public List<Term> getTerms(Term rowType) {
if (terms.containsKey(rowType)) {
return terms.get(rowType);
}
return Lists.newArrayList();
}
/**
* Writes meta.xml and eml.xml to the archive and closes tab writers.
*
*/
public void close() throws IOException {
addEml();
addConstituents();
addMeta();
// flush last record
flushLastCoreRecord();
// TODO: add missing columns in second iteration of data files
// close writers
for (TabWriter w : writers.values()) {
w.close();
}
}
protected static void writeEml(Dataset d, File f) throws IOException {
if (d != null) {
try (Writer writer = new FileWriter(f)){
EMLWriter.newInstance().writeTo(d, writer);
}
}
}
private void addEml() throws IOException {
writeEml(eml, new File(dir, "eml.xml"));
}
private void addConstituents() throws IOException {
if (!constituents.isEmpty()) {
File ddir = new File(dir, Archive.CONSTITUENT_DIR);
ddir.mkdirs();
for (Map.Entry<String, Dataset> de : constituents.entrySet()) {
writeEml(de.getValue(), new File(ddir, de.getKey()+".xml"));
}
}
}
private void addMeta() throws IOException {
File metaFile = new File(dir, Archive.META_FN);
Archive arch = new Archive();
if (eml != null) {
arch.setMetadataLocation("eml.xml");
}
arch.setCore(buildArchiveFile(arch, coreRowType, coreIdTerm));
for (Term rowType : this.terms.keySet()) {
if (!coreRowType.equals(rowType)) {
arch.addExtension(buildArchiveFile(arch, rowType, null));
}
}
MetaDescriptorWriter.writeMetaFile(metaFile, arch);
}
/**
* Build an ArchiveFile for core or extension(s).
*
* @param archive
* @param rowType
* @param idTerm the term of the id column, may be null
* @return
*/
private ArchiveFile buildArchiveFile(Archive archive, Term rowType, Term idTerm) {
ArchiveFile af = ArchiveFile.buildTabFile();
af.setArchive(archive);
af.addLocation(dataFileNames.get(rowType));
af.setEncoding("utf-8");
af.setIgnoreHeaderLines(useHeaders ? 1 : 0);
af.setRowType(rowType);
ArchiveField id = new ArchiveField();
id.setIndex(0);
af.setId(id);
// always use the index 0 for idTerm
if (idTerm != null) {
af.addField(buildArchiveField(0, idTerm));
}
Map<Term,String> termDefaultValueMap = defaultValues.get(rowType);
List<Term> rowTypeTerms = terms.get(rowType);
int idx = 0;
String defaultValue;
for (Term c : rowTypeTerms) {
idx++;
defaultValue = (termDefaultValueMap !=null ? termDefaultValueMap.get(c) : null);
af.addField(buildArchiveField(idx, c, defaultValue));
}
// check if default values are provided for this rowType
if(termDefaultValueMap != null){
for (Term t : termDefaultValueMap.keySet()) {
if(!rowTypeTerms.contains(t)){
af.addField(buildArchiveFieldDefaultValue(t, termDefaultValueMap.get(t)));
}
}
}
return af;
}
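  // For illustration (hypothetical terms, assuming the GBIF DwcTerm vocabulary): with
  // idTerm = DwcTerm.taxonID and terms [DwcTerm.scientificName, DwcTerm.kingdom], the
  // resulting ArchiveFile uses column 0 for the id (also mapped to taxonID), column 1
  // for scientificName and column 2 for kingdom, while any term that only appears in
  // defaultValues is added as a field carrying a default value but no column index.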
/**
* Build an ArchiveField with a defaultValue and no index.
*
* @param term
* @param defaultValue
* @return
*/
private ArchiveField buildArchiveFieldDefaultValue(Term term, String defaultValue){
Preconditions.checkNotNull(term, "Can't use a null term");
Preconditions.checkNotNull(defaultValue, "Can't use a null defaultValue");
return new ArchiveField(term, defaultValue);
}
/**
* Build an ArchiveField with no defaultValue.
*
* @param idx
* @param term
* @return
*/
private ArchiveField buildArchiveField(Integer idx, Term term){
return buildArchiveField(idx, term, null);
}
/**
*
* Build an ArchiveField from optional parameters.
*
* @param idx index or null
* @param term term or null
* @param defaultValue default value or null
* @return
*/
private ArchiveField buildArchiveField(Integer idx, Term term, String defaultValue){
Preconditions.checkNotNull(idx, "Can't use a null index");
Preconditions.checkNotNull(term, "Can't use a null term");
ArchiveField field = new ArchiveField(idx, term);
if (StringUtils.isNotBlank(defaultValue)){
field.setDefaultValue(defaultValue);
}
return field;
}
}
| Add DwcaWriter methods to deal with multi value delimiters
| src/main/java/org/gbif/dwca/io/DwcaWriter.java | Add DwcaWriter methods to deal with multi value delimiters | <ide><path>rc/main/java/org/gbif/dwca/io/DwcaWriter.java
<ide> * limitations under the License.
<ide> */
<ide>
<add>import com.google.common.base.Preconditions;
<add>import com.google.common.collect.Lists;
<add>import com.google.common.collect.Maps;
<add>import com.google.common.collect.Sets;
<add>import org.apache.commons.io.FileUtils;
<add>import org.apache.commons.lang3.StringUtils;
<ide> import org.gbif.api.model.registry.Dataset;
<ide> import org.gbif.dwc.terms.DcTerm;
<ide> import org.gbif.dwc.terms.DwcTerm;
<ide> import org.gbif.dwca.record.Record;
<ide> import org.gbif.io.TabWriter;
<ide> import org.gbif.registry.metadata.EMLWriter;
<del>
<del>import java.io.File;
<del>import java.io.FileOutputStream;
<del>import java.io.FileWriter;
<del>import java.io.IOException;
<del>import java.io.OutputStream;
<del>import java.io.Writer;
<del>import java.util.ArrayList;
<del>import java.util.HashMap;
<del>import java.util.List;
<del>import java.util.Map;
<del>import java.util.Set;
<del>import javax.annotation.Nullable;
<del>
<del>import com.google.common.base.Preconditions;
<del>import com.google.common.collect.Lists;
<del>import com.google.common.collect.Maps;
<del>import com.google.common.collect.Sets;
<del>import com.google.common.io.Closeables;
<del>import org.apache.commons.io.FileUtils;
<del>import org.apache.commons.lang3.StringUtils;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<add>
<add>import javax.annotation.Nullable;
<add>import java.io.*;
<add>import java.util.*;
<ide>
<ide> /**
<ide> * Simple writer class to create valid dwc archives using tab data files.
<ide> private final Map<Term, List<Term>> terms = Maps.newHashMap();
<ide> // key=rowType, value=default values per column
<ide> private final Map<Term, Map<Term, String>> defaultValues = Maps.newHashMap();
<add> private final Map<Term, Map<Term, String>> multiValueDelimiter = Maps.newHashMap();
<ide> private Dataset eml;
<ide> private Map<String, Dataset> constituents = Maps.newHashMap();
<ide>
<ide> }
<ide>
<ide> /**
<add> * Declares the multi value delimiter for a term of the core rowType.
<add> *
<add> * @param term
<add> * @param defaultValue
<add> */
<add> public void addCoreMultiValueDelimiter(Term term, String defaultValue){
<add> addMultiValueDelimiter(coreRowType, term, defaultValue);
<add> }
<add>
<add> /**
<add> * Declares the multi value delimiter for a term of the provided rowType.
<add> */
<add> public void addMultiValueDelimiter(Term rowType, Term term, String delimiter){
<add>
<add> if(!multiValueDelimiter.containsKey(rowType)){
<add> multiValueDelimiter.put(rowType, new HashMap<Term, String>());
<add> }
<add>    Map<Term,String> delimiters = multiValueDelimiter.get(rowType);
<add> if(delimiters.containsKey(term)){
<add> throw new IllegalStateException("The delimiter of term "+ term + " is already defined");
<add> }
<add> delimiters.put(term, delimiter);
<add> }
<add>
<add> /**
<ide> * @return new map of all current data file names by their rowTypes.
<ide> */
<ide> public Map<Term, String> getDataFiles() {
<ide> }
<ide>
<ide> Map<Term,String> termDefaultValueMap = defaultValues.get(rowType);
<add> Map<Term,String> termMultiValueDelimiterMap = multiValueDelimiter.get(rowType);
<ide> List<Term> rowTypeTerms = terms.get(rowType);
<ide> int idx = 0;
<ide> String defaultValue;
<add> String mvDelim;
<ide> for (Term c : rowTypeTerms) {
<ide> idx++;
<ide> defaultValue = (termDefaultValueMap !=null ? termDefaultValueMap.get(c) : null);
<del> af.addField(buildArchiveField(idx, c, defaultValue));
<add> mvDelim = (termMultiValueDelimiterMap !=null ? termMultiValueDelimiterMap.get(c) : null);
<add> af.addField(buildArchiveField(idx, c, defaultValue, mvDelim));
<ide> }
<ide>
<ide> // check if default values are provided for this rowType
<ide> * @return
<ide> */
<ide> private ArchiveField buildArchiveField(Integer idx, Term term, String defaultValue){
<add> return buildArchiveField(idx, term, defaultValue, null);
<add> }
<add>
<add> /**
<add> *
<add> * Build an ArchiveField from optional parameters.
<add> *
<add> * @param idx index or null
<add> * @param term term or null
<add> * @param defaultValue default value or null
<add> * @param multiValueDelimiter value delimiter or null
<add> */
<add> private ArchiveField buildArchiveField(Integer idx, Term term, String defaultValue, String multiValueDelimiter){
<ide> Preconditions.checkNotNull(idx, "Can't use a null index");
<ide> Preconditions.checkNotNull(term, "Can't use a null term");
<ide>
<ide> if (StringUtils.isNotBlank(defaultValue)){
<ide> field.setDefaultValue(defaultValue);
<ide> }
<add> if (StringUtils.isNotEmpty(multiValueDelimiter)){
<add> field.setDelimitedBy(multiValueDelimiter);
<add> }
<ide> return field;
<ide> }
<ide> } |
|
Java | apache-2.0 | 4c68b0765038f5f0ffda23a3699f67e0c5b8ab57 | 0 | serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,Sargul/dbeaver | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2015 Serge Rieder ([email protected])
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License (version 2)
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jkiss.dbeaver.ui.dialogs.driver;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.*;
import org.jkiss.dbeaver.model.DBPDriverLibrary;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableContext;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.registry.driver.DriverDescriptor;
import org.jkiss.dbeaver.registry.driver.DriverFileManager;
import org.jkiss.dbeaver.runtime.RunnableContextDelegate;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.utils.GeneralUtils;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
class DriverDownloadAutoPage extends DriverDownloadPage {
DriverDownloadAutoPage() {
super("Automatic download", "Download driver files", null);
setPageComplete(false);
}
@Override
public void createControl(Composite parent) {
DriverDownloadWizard wizard = getWizard();
final DriverDescriptor driver = wizard.getDriver();
setMessage("Download " + driver.getFullName() + " driver files");
initializeDialogUnits(parent);
Composite composite = UIUtils.createPlaceholder(parent, 1);
composite.setLayoutData(new GridData(GridData.FILL_BOTH));
if (!wizard.isForceDownload()) {
Label infoText = new Label(composite, SWT.NONE);
infoText.setText(driver.getFullName() + " driver files are missing.\nDBeaver can download these files automatically.\n\n");
infoText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
}
{
Group filesGroup = UIUtils.createControlGroup(composite, "Files required by driver", 1, -1, -1);
filesGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
Tree filesTree = new Tree(filesGroup, SWT.BORDER | SWT.FULL_SELECTION);
filesTree.setHeaderVisible(true);
filesTree.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
UIUtils.createTreeColumn(filesTree, SWT.LEFT, "File");
UIUtils.createTreeColumn(filesTree, SWT.LEFT, "Version");
for (DBPDriverLibrary file : wizard.getFiles()) {
TreeItem item = new TreeItem(filesTree, SWT.NONE);
item.setImage(DBeaverIcons.getImage(file.getIcon()));
item.setText(0, file.getDisplayName());
item.setText(1, "");
}
UIUtils.packColumns(filesTree);
}
if (!wizard.isForceDownload()) {
Label infoText = new Label(composite, SWT.NONE);
infoText.setText("\nOr you can obtain driver files by yourself and add them in driver editor.");
infoText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
}
createLinksPanel(composite);
setControl(composite);
}
@Override
void performFinish() {
downloadLibraryFiles(new RunnableContextDelegate(getContainer()), getWizard().getFiles());
}
private void downloadLibraryFiles(DBRRunnableContext runnableContext, final List<? extends DBPDriverLibrary> files)
{
if (!getWizard().getDriver().acceptDriverLicenses(runnableContext)) {
return;
}
for (int i = 0, filesSize = files.size(); i < filesSize; ) {
DBPDriverLibrary lib = files.get(i);
int result = downloadLibraryFile(runnableContext, lib);
switch (result) {
case IDialogConstants.CANCEL_ID:
case IDialogConstants.ABORT_ID:
return;
case IDialogConstants.RETRY_ID:
continue;
case IDialogConstants.OK_ID:
case IDialogConstants.IGNORE_ID:
i++;
break;
}
}
}
private int downloadLibraryFile(DBRRunnableContext runnableContext, final DBPDriverLibrary file)
{
try {
runnableContext.run(true, true, new DBRRunnableWithProgress() {
@Override
public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
try {
DriverFileManager.downloadLibraryFile(monitor, file, getWizard().isUpdateVersion());
} catch (IOException e) {
throw new InvocationTargetException(e);
}
}
});
return IDialogConstants.OK_ID;
} catch (InterruptedException e) {
// User just canceled download
return IDialogConstants.CANCEL_ID;
} catch (InvocationTargetException e) {
if (file.getType() == DBPDriverLibrary.FileType.license) {
return IDialogConstants.OK_ID;
}
DownloadRetry retryConfirm = new DownloadRetry(file, e.getTargetException());
UIUtils.runInUI(null, retryConfirm);
return retryConfirm.result;
}
}
private class DownloadRetry implements Runnable {
private final DBPDriverLibrary file;
private final Throwable error;
private int result;
public DownloadRetry(DBPDriverLibrary file, Throwable error)
{
this.file = file;
this.error = error;
}
@Override
public void run()
{
DownloadErrorDialog dialog = new DownloadErrorDialog(
null,
file.getDisplayName(),
"Driver file download failed.\nDo you want to retry?",
error);
result = dialog.open();
}
}
public static class DownloadErrorDialog extends ErrorDialog {
public DownloadErrorDialog(
Shell parentShell,
String dialogTitle,
String message,
Throwable error)
{
super(parentShell, dialogTitle, message,
GeneralUtils.makeExceptionStatus(error),
IStatus.INFO | IStatus.WARNING | IStatus.ERROR);
}
@Override
protected void createButtonsForButtonBar(Composite parent) {
createButton(
parent,
IDialogConstants.ABORT_ID,
IDialogConstants.ABORT_LABEL,
true);
createButton(
parent,
IDialogConstants.RETRY_ID,
IDialogConstants.RETRY_LABEL,
false);
createButton(
parent,
IDialogConstants.IGNORE_ID,
IDialogConstants.IGNORE_LABEL,
false);
createDetailsButton(parent);
}
@Override
protected void buttonPressed(int buttonId) {
if (buttonId == IDialogConstants.DETAILS_ID) {
super.buttonPressed(buttonId);
} else {
setReturnCode(buttonId);
close();
}
}
}
}
| plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/dialogs/driver/DriverDownloadAutoPage.java | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2015 Serge Rieder ([email protected])
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License (version 2)
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jkiss.dbeaver.ui.dialogs.driver;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.*;
import org.jkiss.dbeaver.model.DBPDriverLibrary;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableContext;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.registry.driver.DriverDescriptor;
import org.jkiss.dbeaver.registry.driver.DriverFileManager;
import org.jkiss.dbeaver.runtime.RunnableContextDelegate;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.utils.GeneralUtils;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
class DriverDownloadAutoPage extends DriverDownloadPage {
DriverDownloadAutoPage() {
super("Automatic download", "Download driver files", null);
setPageComplete(false);
}
@Override
public void createControl(Composite parent) {
DriverDownloadWizard wizard = getWizard();
final DriverDescriptor driver = wizard.getDriver();
setMessage("Download " + driver.getFullName() + " driver files");
initializeDialogUnits(parent);
Composite composite = UIUtils.createPlaceholder(parent, 1);
composite.setLayoutData(new GridData(GridData.FILL_BOTH));
if (!wizard.isForceDownload()) {
Label infoText = new Label(composite, SWT.NONE);
infoText.setText(driver.getFullName() + " driver files are missing.\nDBeaver can download these files automatically.\n\n");
infoText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
}
{
Group filesGroup = UIUtils.createControlGroup(composite, "Files required by driver", 1, -1, -1);
filesGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
Tree filesTree = new Tree(filesGroup, SWT.BORDER | SWT.FULL_SELECTION);
filesTree.setHeaderVisible(true);
filesTree.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
UIUtils.createTreeColumn(filesTree, SWT.LEFT, "File");
UIUtils.createTreeColumn(filesTree, SWT.LEFT, "Version");
for (DBPDriverLibrary file : wizard.getFiles()) {
TreeItem item = new TreeItem(filesTree, SWT.NONE);
item.setImage(DBeaverIcons.getImage(file.getIcon()));
item.setText(0, file.getDisplayName());
item.setText(1, "");
}
UIUtils.packColumns(filesTree);
}
if (!wizard.isForceDownload()) {
Label infoText = new Label(composite, SWT.NONE);
infoText.setText("\nOr you can obtain driver files by yourself and add them in driver editor.");
infoText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
}
createLinksPanel(composite);
setControl(composite);
}
@Override
void performFinish() {
downloadLibraryFiles(new RunnableContextDelegate(getContainer()), getWizard().getFiles());
}
private void downloadLibraryFiles(DBRRunnableContext runnableContext, final List<? extends DBPDriverLibrary> files)
{
if (!getWizard().getDriver().acceptDriverLicenses(runnableContext)) {
return;
}
for (int i = 0, filesSize = files.size(); i < filesSize; ) {
DBPDriverLibrary lib = files.get(i);
int result = downloadLibraryFile(runnableContext, lib);
switch (result) {
case IDialogConstants.CANCEL_ID:
case IDialogConstants.ABORT_ID:
return;
case IDialogConstants.RETRY_ID:
continue;
case IDialogConstants.OK_ID:
case IDialogConstants.IGNORE_ID:
i++;
break;
}
}
}
private int downloadLibraryFile(DBRRunnableContext runnableContext, final DBPDriverLibrary file)
{
try {
runnableContext.run(true, true, new DBRRunnableWithProgress() {
@Override
public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
try {
DriverFileManager.downloadLibraryFile(monitor, file, getWizard().isUpdateVersion());
} catch (IOException e) {
throw new InvocationTargetException(e);
}
}
});
return IDialogConstants.OK_ID;
} catch (InterruptedException e) {
// User just canceled download
return IDialogConstants.CANCEL_ID;
} catch (InvocationTargetException e) {
if (file.getType() == DBPDriverLibrary.FileType.license) {
return IDialogConstants.OK_ID;
}
DownloadRetry retryConfirm = new DownloadRetry(file, e.getTargetException());
UIUtils.runInUI(null, retryConfirm);
return retryConfirm.result;
}
}
private class DownloadRetry implements Runnable {
private final DBPDriverLibrary file;
private final Throwable error;
private int result;
public DownloadRetry(DBPDriverLibrary file, Throwable error)
{
this.file = file;
this.error = error;
}
@Override
public void run()
{
DownloadErrorDialog dialog = new DownloadErrorDialog(
null,
file.getPath(),
"Driver file download failed.\nDo you want to retry?",
error);
result = dialog.open();
}
}
public static class DownloadErrorDialog extends ErrorDialog {
public DownloadErrorDialog(
Shell parentShell,
String dialogTitle,
String message,
Throwable error)
{
super(parentShell, dialogTitle, message,
GeneralUtils.makeExceptionStatus(error),
IStatus.INFO | IStatus.WARNING | IStatus.ERROR);
}
@Override
protected void createButtonsForButtonBar(Composite parent) {
createButton(
parent,
IDialogConstants.ABORT_ID,
IDialogConstants.ABORT_LABEL,
true);
createButton(
parent,
IDialogConstants.RETRY_ID,
IDialogConstants.RETRY_LABEL,
false);
createButton(
parent,
IDialogConstants.IGNORE_ID,
IDialogConstants.IGNORE_LABEL,
false);
createDetailsButton(parent);
}
@Override
protected void buttonPressed(int buttonId) {
if (buttonId == IDialogConstants.DETAILS_ID) {
super.buttonPressed(buttonId);
} else {
setReturnCode(buttonId);
close();
}
}
}
}
| Driver libraries model
Former-commit-id: ed31c5b229f60d539bb891cbada3944cfe8510d0 | plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/dialogs/driver/DriverDownloadAutoPage.java | Driver libraries model | <ide><path>lugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/dialogs/driver/DriverDownloadAutoPage.java
<ide> {
<ide> DownloadErrorDialog dialog = new DownloadErrorDialog(
<ide> null,
<del> file.getPath(),
<add> file.getDisplayName(),
<ide> "Driver file download failed.\nDo you want to retry?",
<ide> error);
<ide> result = dialog.open(); |
|
JavaScript | agpl-3.0 | 5f4d0cf2a501ed848de944bb903bf3354059546d | 0 | timelapseplus/VIEW,timelapseplus/VIEW,timelapseplus/VIEW,timelapseplus/VIEW,timelapseplus/VIEW | require('rootpath')();
var INPUTS_BIN_PATH = "/home/view/current/bin/inputs";
var GESTURE_BIN_PATH = "/home/view/current/bin/gesture";
var GestureLib = require('apds-gesture');
var spawn = require('child_process').spawn;
var exec = require('child_process').exec;
var Button = require('gpio-button');
var db = require("system/db.js");
var EventEmitter = require("events").EventEmitter;
var inputs = new EventEmitter();
var GESTURE_INT_GPIO = 72;
var GESTURE_I2C_BUS = 2;
var gesture = GestureLib.use(GESTURE_I2C_BUS, GESTURE_INT_GPIO);
gesture.on('ready', function() {
console.log("INPUTS: found a gesture sensor");
});
gesture.on('error', function(err) {
console.log("INPUTS: Gesture Error: ", err);
});
gesture.on('movement', function(dir) {
inputs.emit('G', dir.substr(0, 1).toUpperCase());
});
var inputsProcess = null;
var inputsRunning = false;
var gestureProcess = null;
var gestureRunning = false;
var stop = false;
var stopGesture = false;
var HOLD_TIME = 1500;
var powerButton = {
platformEvent: "1c2ac00.i2c-platform-axp20x-pek",
116: {
name: "power",
pressed: 5,
held: 6
}
}
var buttons = {
platformEvent: "button-knob",
1: {
name: "back",
pressed: 1,
held: 1+6
},
2: {
name: "enter",
pressed: 2,
held: 2+6
},
3: {
name: "menu",
pressed: 3,
held: 3+6
},
4: {
name: "knob",
pressed: 4,
held: 4+6
}
};
setupButton(powerButton);
setupButton(buttons);
function setupButton(buttonConfig) {
buttonConfig._button = new Button(buttonConfig.platformEvent);
buttonConfig._btnPowerPressedTimer = null;
buttonConfig._button.on('press', function(code) {
if(code && buttonConfig[code]) {
//console.log("button", buttonConfig[code].name, "pressed");
buttonConfig[code]._pressed = true;
inputs.emit('B', buttonConfig[code].pressed);
if(buttonConfig[code]._btnPowerPressedTimer != null) clearTimeout(buttonConfig[code]._btnPowerPressedTimer);
buttonConfig[code]._btnPowerPressedTimer = setTimeout(function(){
inputs.emit('B', buttonConfig[code].held);
}, HOLD_TIME);
}
});
buttonConfig._button.on('release', function(code) {
if(code && buttonConfig[code]) {
//console.log("button", buttonConfig[code].name, "released");
buttonConfig[code]._pressed = false;
if(buttonConfig[code]._btnPowerPressedTimer != null) clearTimeout(buttonConfig[code]._btnPowerPressedTimer);
}
});
buttonConfig._button.on('error', function(err) {
console.log("button error: ", buttonConfig.name, err);
});
}
exec("killall gesture");
exec("killall inputs");
var options = {};
var mcuSetup = false;
inputs.start = function(knobOptions) {
options = knobOptions;
if(knobOptions.knob) {
stop = false;
if(inputsRunning) return;
inputsProcess = spawn(INPUTS_BIN_PATH);
inputsRunning = true;
console.log("inputs process started");
inputsProcess.stdout.on('data', function(chunk) {
//console.log("inputs stdin: " + chunk.toString());
var matches = chunk.toString().match(/([A-Z])=([A-Z0-9\-]+)/);
if (matches && matches.length > 1) {
if(matches[1] == 'D') {
var dir = matches[2];
if(buttons['4']._pressed) dir += "+";
inputs.emit('D', dir);
}
}
});
inputsProcess.stderr.on('data', function(chunk) {
console.log("inputs stderr: " + chunk.toString());
chunk = null;
});
inputsProcess.on('close', function(code) {
console.log("inputs process exited");
inputsRunning = false;
if (!stop) {
setTimeout(function() {
                    if(!stop) inputs.start(options);
}, 500);
}
});
} else if(options.mcu) {
if(mcuSetup) return;
mcuSetup = true;
options.mcu.on('knob', function(val) {
k = 'U';
if(val < 0) {
k = 'D';
}
if(buttons['4']._pressed) k += "+";
inputs.emit('D', k);
});
}
}
inputs.startGesture = function() {
inputs.gestureStatus = "enabled";
db.get('gestureCalibration', function(err, gestureCalibration) {
if(err || !gestureCalibration) gestureCalibration = {};
gesture.setup(gestureCalibration, function(){
console.log("INPUTS: starting gesture sensor", (gestureCalibration.gUOffset ? "(calibrated)" : ""));
gesture.start();
});
});
}
inputs.calibrateGesture = function(statusCallback) {
gesture.calibrate(function(err, status, calResults) {
if(calResults) {
db.set('gestureCalibration', calResults);
gesture.start();
} else if(err) {
console.log("INPUTS: error calibrating gesture: ", err);
}
statusCallback && statusCallback(err, status, (calResults || err) ? true : false);
});
}
inputs.stop = function(callback) {
process.nextTick(function(){
stop = true;
stopGesture = true;
if (inputsRunning) {
console.log("inputs process exiting...");
try {
inputsProcess.stdin.write('\n\n\n');
inputsProcess.stdin.end();
} catch (e) {
console.log("input close error: ", e);
setTimeout(function(){
inputsProcess.kill();
}, 1000);
}
}
inputs.stopGesture();
if(callback) setTimeout(callback, 100); // give time for processes to exit
});
}
inputs.stopGesture = function() {
inputs.gestureStatus = "disabled";
gesture.stop();
gesture.disable();
}
module.exports = inputs; | hardware/inputs.js | require('rootpath')();
var INPUTS_BIN_PATH = "/home/view/current/bin/inputs";
var GESTURE_BIN_PATH = "/home/view/current/bin/gesture";
var GestureLib = require('apds-gesture');
var spawn = require('child_process').spawn;
var exec = require('child_process').exec;
var Button = require('gpio-button');
var db = require("system/db.js");
var EventEmitter = require("events").EventEmitter;
var inputs = new EventEmitter();
var gesture = GestureLib.use(2); //i2c port 2
gesture.on('ready', function() {
console.log("INPUTS: found a gesture sensor");
});
gesture.on('error', function(err) {
console.log("INPUTS: Gesture Error: ", err);
});
gesture.on('movement', function(dir) {
inputs.emit('G', dir.substr(0, 1).toUpperCase());
});
var inputsProcess = null;
var inputsRunning = false;
var gestureProcess = null;
var gestureRunning = false;
var stop = false;
var stopGesture = false;
var HOLD_TIME = 1500;
var powerButton = {
platformEvent: "1c2ac00.i2c-platform-axp20x-pek",
116: {
name: "power",
pressed: 5,
held: 6
}
}
var buttons = {
platformEvent: "button-knob",
1: {
name: "back",
pressed: 1,
held: 1+6
},
2: {
name: "enter",
pressed: 2,
held: 2+6
},
3: {
name: "menu",
pressed: 3,
held: 3+6
},
4: {
name: "knob",
pressed: 4,
held: 4+6
}
};
setupButton(powerButton);
setupButton(buttons);
function setupButton(buttonConfig) {
buttonConfig._button = new Button(buttonConfig.platformEvent);
buttonConfig._btnPowerPressedTimer = null;
buttonConfig._button.on('press', function(code) {
if(code && buttonConfig[code]) {
//console.log("button", buttonConfig[code].name, "pressed");
buttonConfig[code]._pressed = true;
inputs.emit('B', buttonConfig[code].pressed);
if(buttonConfig[code]._btnPowerPressedTimer != null) clearTimeout(buttonConfig[code]._btnPowerPressedTimer);
buttonConfig[code]._btnPowerPressedTimer = setTimeout(function(){
inputs.emit('B', buttonConfig[code].held);
}, HOLD_TIME);
}
});
buttonConfig._button.on('release', function(code) {
if(code && buttonConfig[code]) {
//console.log("button", buttonConfig[code].name, "released");
buttonConfig[code]._pressed = false;
if(buttonConfig[code]._btnPowerPressedTimer != null) clearTimeout(buttonConfig[code]._btnPowerPressedTimer);
}
});
buttonConfig._button.on('error', function(err) {
console.log("button error: ", buttonConfig.name, err);
});
}
exec("killall gesture");
exec("killall inputs");
var options = {};
var mcuSetup = false;
inputs.start = function(knobOptions) {
options = knobOptions;
if(knobOptions.knob) {
stop = false;
if(inputsRunning) return;
inputsProcess = spawn(INPUTS_BIN_PATH);
inputsRunning = true;
console.log("inputs process started");
inputsProcess.stdout.on('data', function(chunk) {
//console.log("inputs stdin: " + chunk.toString());
var matches = chunk.toString().match(/([A-Z])=([A-Z0-9\-]+)/);
if (matches && matches.length > 1) {
if(matches[1] == 'D') {
var dir = matches[2];
if(buttons['4']._pressed) dir += "+";
inputs.emit('D', dir);
}
}
});
inputsProcess.stderr.on('data', function(chunk) {
console.log("inputs stderr: " + chunk.toString());
chunk = null;
});
inputsProcess.on('close', function(code) {
console.log("inputs process exited");
inputsRunning = false;
if (!stop) {
setTimeout(function() {
                    if(!stop) inputs.start(options);
}, 500);
}
});
} else if(options.mcu) {
if(mcuSetup) return;
mcuSetup = true;
options.mcu.on('knob', function(val) {
k = 'U';
if(val < 0) {
k = 'D';
}
if(buttons['4']._pressed) k += "+";
inputs.emit('D', k);
});
}
}
inputs.startGesture = function() {
inputs.gestureStatus = "enabled";
db.get('gestureCalibration', function(err, gestureCalibration) {
if(err || !gestureCalibration) gestureCalibration = {};
gesture.setup(gestureCalibration, function(){
console.log("INPUTS: starting gesture sensor", (gestureCalibration.gUOffset ? "(calibrated)" : ""));
gesture.start();
});
});
}
inputs.calibrateGesture = function(statusCallback) {
gesture.calibrate(function(err, status, calResults) {
if(calResults) {
db.set('gestureCalibration', calResults);
gesture.start();
} else if(err) {
console.log("INPUTS: error calibrating gesture: ", err);
}
statusCallback && statusCallback(err, status, (calResults || err) ? true : false);
});
}
inputs.stop = function(callback) {
process.nextTick(function(){
stop = true;
stopGesture = true;
if (inputsRunning) {
console.log("inputs process exiting...");
try {
inputsProcess.stdin.write('\n\n\n');
inputsProcess.stdin.end();
} catch (e) {
console.log("input close error: ", e);
setTimeout(function(){
inputsProcess.kill();
}, 1000);
}
}
inputs.stopGesture();
if(callback) setTimeout(callback, 100); // give time for processes to exit
});
}
inputs.stopGesture = function() {
inputs.gestureStatus = "disabled";
gesture.stop();
gesture.disable();
}
module.exports = inputs; | using GPIO interrupt for gesture sensor
| hardware/inputs.js | using GPIO interrupt for gesture sensor | <ide><path>ardware/inputs.js
<ide>
<ide> var inputs = new EventEmitter();
<ide>
<del>var gesture = GestureLib.use(2); //i2c port 2
<add>var GESTURE_INT_GPIO = 72;
<add>var GESTURE_I2C_BUS = 2;
<add>
<add>var gesture = GestureLib.use(GESTURE_I2C_BUS, GESTURE_INT_GPIO);
<ide>
<ide> gesture.on('ready', function() {
<ide> console.log("INPUTS: found a gesture sensor"); |
|
JavaScript | bsd-3-clause | ce04569ed2ec37177a5a994b5a63c4b7fce8edad | 0 | neonstalwart/mongo-perstore | module.exports = MongoPerstore;
var mongoRql = require('mongo-rql'),
Q = require('q'),
DuplicateEntryError = require('perstore/errors').DuplicateEntryError;
function MongoPerstore(options) {
	if (!(this instanceof MongoPerstore)) {
return new MongoPerstore(options);
}
options = options || {};
if (!options.db) {
throw new Error('a db must be provided to MongoPerstore');
}
if (!options.collection) {
throw new Error('a collection name must be provided to MongoPerstore');
}
this.db = options.db;
this.collection = options.collection;
}
MongoPerstore.prototype = {
constructor: MongoPerstore,
idProperty: 'id',
get: function (id) {
var idProperty = this.idProperty;
return this._getCollection().then(function (collection) {
var query = {};
query[ idProperty ] = id;
return Q.ninvoke(collection, 'findOne', query, { fields: { _id: 0 } });
});
},
put: function (value, options) {
options = options || {};
var store = this,
idProperty = store.idProperty,
key = 'id' in options ?
value[ idProperty ] = options.id :
// TODO: assign an id
value[ idProperty ];
return store._getCollection().then(function (collection) {
var result,
query;
if (options.overwrite === false) {
result = store.get(key)
.then(function (doc) {
if (doc === null) {
return Q.ninvoke(collection, 'insert', value);
}
else {
throw new DuplicateEntryError(key + ' exists, and can\'t be overwritten');
}
});
}
else {
query = {};
query[ idProperty ] = key;
// TODO: any options needed apart from upsert
result = Q.ninvoke(collection, 'update', query, value, { upsert: options.overwrite });
}
return result.then(function () {
delete value._id;
return key;
});
});
},
delete: function (id) {
var idProperty = this.idProperty;
return this._getCollection().then(function (collection) {
var query = {};
query[ idProperty ] = id;
return Q.ninvoke(collection, 'remove', query);
});
},
query: function (query, options) {
// convert rql query to mongodb query
var mongoQuery = mongoRql(query, options),
criteria = mongoQuery.criteria,
fields = mongoQuery.projection || {},
dbOptions = {
skip: mongoQuery.skip,
limit: mongoQuery.limit,
fields: fields,
sort: mongoQuery.sort
};
fields._id = 0;
return this._getCollection().then(function (collection) {
return Q.ninvoke(collection, 'find', criteria, dbOptions)
.then(function (cursor) {
return Q.all([
Q.ninvoke(cursor, 'count'),
Q.ninvoke(cursor, 'toArray')
])
.spread(function (totalCount, items) {
items.totalCount = totalCount;
return items;
});
});
});
},
_getCollection: function () {
var collection = this.collection;
return Q.when(this.db).then(function (db) {
return db.collection(collection);
});
}
};
| index.js | module.exports = MongoPerstore;
var mongoRql = require('mongo-rql'),
Q = require('q'),
DuplicateEntryError = require('perstore/errors').DuplicateEntryError;
function MongoPerstore(options) {
	if (!(this instanceof MongoPerstore)) {
return new MongoPerstore(options);
}
options = options || {};
if (!options.db) {
throw new Error('a db must be provided to MongoPerstore');
}
if (!options.collection) {
throw new Error('a collection name must be provided to MongoPerstore');
}
this.db = options.db;
this.collection = options.collection;
}
MongoPerstore.prototype = {
constructor: MongoPerstore,
idProperty: 'id',
get: function (id) {
var idProperty = this.idProperty;
return this._getCollection().then(function (collection) {
var query = {};
query[ idProperty ] = id;
return Q.ninvoke(collection, 'findOne', query)
.then(stripObjectId);
});
},
put: function (value, options) {
options = options || {};
var store = this,
idProperty = store.idProperty,
key = 'id' in options ?
value[ idProperty ] = options.id :
// TODO: assign an id
value[ idProperty ];
return store._getCollection().then(function (collection) {
var result,
query;
if (options.overwrite === false) {
result = store.get(key)
.then(function (doc) {
if (doc === null) {
return Q.ninvoke(collection, 'insert', value);
}
else {
throw new DuplicateEntryError(key + ' exists, and can\'t be overwritten');
}
});
}
else {
query = {};
query[ idProperty ] = key;
// TODO: any options needed apart from upsert
result = Q.ninvoke(collection, 'update', query, value, { upsert: options.overwrite });
}
return result.then(function () {
delete value._id;
return key;
});
});
},
delete: function (id) {
var idProperty = this.idProperty;
return this._getCollection().then(function (collection) {
var query = {};
query[ idProperty ] = id;
return Q.ninvoke(collection, 'remove', query);
});
},
query: function (query, options) {
// convert rql query to mongodb query
var mongoQuery = mongoRql(query, options),
criteria = mongoQuery.criteria,
dbOptions = {
skip: mongoQuery.skip,
limit: mongoQuery.limit,
fields: mongoQuery.projection,
sort: mongoQuery.sort
};
return this._getCollection().then(function (collection) {
return Q.ninvoke(collection, 'find', criteria, dbOptions)
.then(function (cursor) {
return Q.all([
Q.ninvoke(cursor, 'count'),
Q.ninvoke(cursor, 'toArray')
])
.spread(function (totalCount, items) {
items = items.map(stripObjectId);
items.totalCount = totalCount;
return items;
});
});
});
},
_getCollection: function () {
var collection = this.collection;
return Q.when(this.db).then(function (db) {
return db.collection(collection);
});
}
};
function stripObjectId(item) {
if (item) {
delete item._id;
}
return item;
}
| idiomatic exclusion of _id field
| index.js | idiomatic exclusion of _id field | <ide><path>ndex.js
<ide>
<ide> query[ idProperty ] = id;
<ide>
<del> return Q.ninvoke(collection, 'findOne', query)
<del> .then(stripObjectId);
<add> return Q.ninvoke(collection, 'findOne', query, { fields: { _id: 0 } });
<ide> });
<ide> },
<ide>
<ide> // convert rql query to mongodb query
<ide> var mongoQuery = mongoRql(query, options),
<ide> criteria = mongoQuery.criteria,
<add> fields = mongoQuery.projection || {},
<ide> dbOptions = {
<ide> skip: mongoQuery.skip,
<ide> limit: mongoQuery.limit,
<del> fields: mongoQuery.projection,
<add> fields: fields,
<ide> sort: mongoQuery.sort
<ide> };
<add>
<add> fields._id = 0;
<ide>
<ide> return this._getCollection().then(function (collection) {
<ide> return Q.ninvoke(collection, 'find', criteria, dbOptions)
<ide> Q.ninvoke(cursor, 'toArray')
<ide> ])
<ide> .spread(function (totalCount, items) {
<del> items = items.map(stripObjectId);
<ide> items.totalCount = totalCount;
<ide> return items;
<ide> });
<ide> });
<ide> }
<ide> };
<del>
<del>function stripObjectId(item) {
<del> if (item) {
<del> delete item._id;
<del> }
<del>
<del> return item;
<del>} |
|
Java | apache-2.0 | 83d1e07dbf490963dcb76e15a9fc368b1d302653 | 0 | StevenLeRoux/warp10-platform,StevenLeRoux/warp10-platform,hbs/warp10-platform,cityzendata/warp10-platform,cityzendata/warp10-platform,cityzendata/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,hbs/warp10-platform,StevenLeRoux/warp10-platform,cityzendata/warp10-platform,hbs/warp10-platform | //
// Copyright 2017 Cityzen Data
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.standalone;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import io.warp10.continuum.TimeSource;
import io.warp10.continuum.gts.GTSDecoder;
import io.warp10.continuum.gts.GTSEncoder;
import io.warp10.continuum.sensision.SensisionConstants;
import io.warp10.sensision.Sensision;
public class InMemoryChunkSet {
  // Keep track of whether or not a GTSEncoder has all its timestamps in chronological order; this speeds up fetching
//
private final GTSEncoder[] chunks;
/**
* End timestamp of each chunk
*/
private final long[] chunkends;
/**
* Flags indicating if timestamps are increasingly monotonic
*/
private final BitSet chronological;
/**
* Last timestamp encountered in a chunk
*/
private final long[] lasttimestamp;
/**
* Length of chunks in time units
*/
private final long chunklen;
/**
* Number of chunks
*/
private final int chunkcount;
public InMemoryChunkSet(int chunkcount, long chunklen) {
this.chunks = new GTSEncoder[chunkcount];
this.chunkends = new long[chunkcount];
this.chronological = new BitSet(chunkcount);
this.lasttimestamp = new long[chunkcount];
this.chunklen = chunklen;
this.chunkcount = chunkcount;
}
/**
* Store the content of a GTSEncoder in the various chunks we manage
*
* @param encoder The GTSEncoder instance to store
*/
public void store(GTSEncoder encoder) throws IOException {
// Get the current time
long now = TimeSource.getTime();
long lastChunkEnd = chunkEnd(now);
long firstChunkStart = lastChunkEnd - (chunkcount * chunklen) + 1;
// Get a decoder without copying the encoder array
GTSDecoder decoder = encoder.getUnsafeDecoder(false);
int lastchunk = -1;
GTSEncoder chunkEncoder = null;
while(decoder.next()) {
long timestamp = decoder.getTimestamp();
// Ignore timestamp if it is not in the valid range
if (timestamp < firstChunkStart || timestamp > lastChunkEnd) {
continue;
}
// Compute the chunkid
int chunkid = chunk(timestamp);
if (chunkid != lastchunk) {
chunkEncoder = null;
synchronized(this.chunks) {
// Is the chunk non existent or has expired?
if (null == this.chunks[chunkid] || this.chunkends[chunkid] < firstChunkStart) {
long end = chunkEnd(timestamp);
this.chunks[chunkid] = new GTSEncoder(0L);
this.lasttimestamp[chunkid] = end - this.chunklen;
this.chronological.set(chunkid);
this.chunkends[chunkid] = end;
}
chunkEncoder = this.chunks[chunkid];
if (timestamp < this.lasttimestamp[chunkid]) {
this.chronological.set(chunkid, false);
}
this.lasttimestamp[chunkid] = timestamp;
}
lastchunk = chunkid;
}
chunkEncoder.addValue(timestamp, decoder.getLocation(), decoder.getElevation(), decoder.getValue());
}
}
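  // Note on retention: only timestamps inside the window covered by the chunk ring are
  // kept. For example, assuming chunkcount = 3 and chunklen = 600_000_000 (10 minutes
  // with microsecond time units), any datapoint older than chunkEnd(now) - 3 * chunklen + 1,
  // i.e. roughly 30 minutes before 'now', is silently skipped by store().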
/**
* Compute the chunk id given a timestamp.
* @param timestamp
* @return
*/
private int chunk(long timestamp) {
int chunkid;
if (timestamp >= 0) {
chunkid = (int) ((timestamp / chunklen) % chunkcount);
} else {
chunkid = chunkcount + (int) ((((timestamp + 1) / chunklen) % chunkcount) - 1);
//chunkid = chunkcount - (int) ((- (timestamp + 1) / chunklen) % chunkcount);
}
return chunkid;
}
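  // Worked example, assuming chunklen = 600_000_000 and chunkcount = 3:
  //   chunk(1_250_000_000) = (1_250_000_000 / 600_000_000) % 3 = 2 % 3 = 2
  //   chunk(-1)            = 3 + ((((-1 + 1) / 600_000_000) % 3) - 1) = 3 - 1 = 2
  // so both positive and negative timestamps map into the [0, chunkcount) range.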
/**
* Compute the end timestamp of the chunk this timestamp
* belongs to.
*
* @param timestamp
* @return
*/
private long chunkEnd(long timestamp) {
long end;
if (timestamp > 0) {
end = ((timestamp / chunklen) * chunklen) + chunklen - 1;
} else {
end = ((((timestamp + 1) / chunklen) - 1) * chunklen) + chunklen - 1;
}
return end;
}
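  // Worked example, assuming chunklen = 600_000_000:
  //   chunkEnd(1_250_000_000) = (1_250_000_000 / 600_000_000) * 600_000_000 + 600_000_000 - 1 = 1_799_999_999
  //   chunkEnd(-1)            = (((-1 + 1) / 600_000_000) - 1) * 600_000_000 + 600_000_000 - 1 = -1
  // i.e. the inclusive upper bound of the chunk the timestamp falls into.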
/**
* Fetches some data from this chunk set
*
* @param now The end timestamp to consider (inclusive).
* @param timespan The timespan or value count to consider.
* @return
*/
public GTSDecoder fetch(long now, long timespan) throws IOException {
return fetchEncoder(now, timespan).getUnsafeDecoder(false);
}
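  // Usage sketch (hypothetical values): a positive timespan selects by time, e.g.
  // fetch(now, 3_600_000_000L) returns the datapoints with timestamps in
  // [now - 3_600_000_000 + 1, now], while a negative timespan selects by count, e.g.
  // fetch(now, -100L) returns at most the 100 most recent datapoints with a timestamp <= now.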
public List<GTSDecoder> getDecoders() {
List<GTSDecoder> decoders = new ArrayList<GTSDecoder>();
synchronized (this.chunks) {
      for (int i = 0; i < this.chunks.length; i++) {
if (null == this.chunks[i]) {
continue;
}
decoders.add(this.chunks[i].getUnsafeDecoder(false));
}
}
return decoders;
}
public GTSEncoder fetchEncoder(long now, long timespan) throws IOException {
// Clean up first
clean(TimeSource.getTime());
if (timespan < 0) {
return fetchCountEncoder(now, -timespan);
}
//
// Determine the chunk id of 'now'
// We offset it by chunkcount so we can safely decrement and
// still have a positive remainder when doing a modulus
//
int nowchunk = chunk(now) + this.chunkcount;
// Compute the first timestamp (included)
long firstTimestamp = now - timespan + 1;
GTSEncoder encoder = new GTSEncoder(0L);
for (int i = 0; i < this.chunkcount; i++) {
int chunk = (nowchunk - i) % this.chunkcount;
GTSDecoder chunkDecoder = null;
synchronized(this.chunks) {
// Ignore a given chunk if it does not intersect our current range
if (this.chunkends[chunk] < firstTimestamp || (this.chunkends[chunk] - this.chunklen) >= now) {
continue;
}
// Extract a decoder to scan the chunk
if (null != this.chunks[chunk]) {
chunkDecoder = this.chunks[chunk].getUnsafeDecoder(false);
}
}
if (null == chunkDecoder) {
continue;
}
// Merge the data from chunkDecoder which is in the requested range in 'encoder'
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now || ts < firstTimestamp) {
continue;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
}
}
return encoder;
}
private GTSDecoder fetchCount(long now, long count) throws IOException {
return fetchCountEncoder(now, count).getUnsafeDecoder(false);
}
private GTSEncoder fetchCountEncoder(long now, long count) throws IOException {
//
// Determine the chunk id of 'now'
// We offset it by chunkcount so we can safely decrement and
// still have a positive remainder when doing a modulus
//
int nowchunk = chunk(now) + this.chunkcount;
GTSEncoder encoder = new GTSEncoder();
// Initialize the number of datapoints to fetch
long nvalues = count;
// Loop over the chunks
for (int i = 0; i < this.chunkcount; i++) {
int chunk = (nowchunk - i) % this.chunkcount;
GTSDecoder chunkDecoder = null;
boolean inorder = true;
long chunkEnd = -1;
synchronized(this.chunks) {
// Ignore a given chunk if it is after 'now'
if (this.chunkends[chunk] - this.chunklen >= now) {
continue;
}
// Extract a decoder to scan the chunk
if (null != this.chunks[chunk]) {
chunkDecoder = this.chunks[chunk].getUnsafeDecoder(false);
inorder = this.chronological.get(chunk);
chunkEnd = this.chunkends[chunk];
}
}
if (null == chunkDecoder) {
continue;
}
// We now have a chunk, we will treat it differently depending if
// it is in chronological order or not
if (inorder) {
//
// If the end timestamp of the chunk is before 'now' and the
// chunk contains less than the remaining values we need to fetch
// we can add everything.
//
if (chunkEnd <= now && chunkDecoder.getCount() <= nvalues) {
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else if (chunkDecoder.getCount() <= nvalues) {
// We have a chunk with chunkEnd > 'now' but which contains less than nvalues,
// so we add all the values whose timestamp is <= 'now'
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
// we can break because we know the encoder is in chronological order.
break;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
//
// The chunk has more values than what we need.
// If the end of the chunk is <= now then we know we must skip count - nvalues and
// add the rest to the result.
// Otherwise it's a little trickier
//
if (chunkEnd <= now) {
long skip = chunkDecoder.getCount() - nvalues;
while(skip > 0 && chunkDecoder.next()) {
skip--;
}
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
            // We will transfer the datapoints whose timestamp is <= now into an intermediate encoder
GTSEncoder intenc = new GTSEncoder();
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
// we can break because we know the encoder is in chronological order.
break;
}
intenc.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
// Then transfer the intermediate encoder to the result
chunkDecoder = intenc.getUnsafeDecoder(false);
long skip = chunkDecoder.getCount() - nvalues;
while(skip > 0 && chunkDecoder.next()) {
skip--;
}
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
}
}
} else {
// The chunk decoder is not in chronological order...
// If the chunk decoder end is <= 'now' and the decoder contains less values than
// what is still needed, add everything.
if (chunkEnd <= now && chunkDecoder.getCount() <= nvalues) {
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else if(chunkDecoder.getCount() <= nvalues) {
// We have a chunk with chunkEnd > 'now' but which contains less than nvalues,
// so we add all the values whose timestamp is <= 'now'
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
// we skip the value as the encoder is not in chronological order
continue;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
// We have a chunk which has more values than what we need and/or whose end
// is after 'now'
        // We will transfer the datapoints whose timestamp is <= now into an intermediate encoder
GTSEncoder intenc = new GTSEncoder();
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
continue;
}
intenc.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
}
//
// Now we need to extract the ticks of the intermediary encoder
//
chunkDecoder = intenc.getUnsafeDecoder(false);
long[] ticks = new long[(int) chunkDecoder.getCount()];
int k = 0;
while(chunkDecoder.next()) {
ticks[k++] = chunkDecoder.getTimestamp();
}
// Now sort the ticks
Arrays.sort(ticks);
          // We must skip values whose timestamp is < ticks[ticks.length - nvalues]
if (ticks.length > nvalues) {
long skipbelow = ticks[ticks.length - (int) nvalues];
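            // e.g. with sorted ticks {2, 5, 7, 9} and nvalues = 2, skipbelow = ticks[2] = 7,
            // so only the datapoints whose timestamp is >= 7 (here 7 and 9) are transferred below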
// Then transfer the intermediate encoder to the result
chunkDecoder = intenc.getUnsafeDecoder(false);
while(chunkDecoder.next() && nvalues > 0) {
long ts = chunkDecoder.getTimestamp();
if (ts < skipbelow) {
continue;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
// The intermediary decoder has less than nvalues whose ts is <= now, transfer everything
chunkDecoder = intenc.getUnsafeDecoder(false);
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
}
}
}
}
return encoder;
}
/**
* Compute the total number of datapoints stored in this chunk set.
*
* @return
*/
public long getCount() {
long count = 0L;
for (GTSEncoder encoder: chunks) {
if (null != encoder) {
count += encoder.getCount();
}
}
return count;
}
/**
* Compute the total size occupied by the encoders in this chunk set
*
* @return
*/
public long getSize() {
long size = 0L;
for (GTSEncoder encoder: chunks) {
if (null != encoder) {
size += encoder.size();
}
}
return size;
}
/**
* Clean expired chunks according to 'now'
*
* @param now
*/
public long clean(long now) {
long cutoff = chunkEnd(now) - this.chunkcount * this.chunklen;
int dropped = 0;
long droppedDatapoints = 0L;
synchronized(this.chunks) {
for (int i = 0; i < this.chunks.length; i++) {
if (null == this.chunks[i]) {
continue;
}
if (this.chunkends[i] <= cutoff) {
droppedDatapoints += this.chunks[i].getCount();
this.chunks[i] = null;
dropped++;
}
}
}
Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STANDALONE_INMEMORY_GC_CHUNKS, Sensision.EMPTY_LABELS, dropped);
return droppedDatapoints;
}
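  // Example, assuming chunkcount = 3 and chunklen = 600_000_000: the cutoff is
  // chunkEnd(now) - 1_800_000_000, so any chunk whose end timestamp is at or before that
  // cutoff lies entirely outside the retention window; it is dropped and its datapoint
  // count is added to the returned total.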
/**
* Optimize all non current chunks by shrinking their buffers.
*
* @param now
*/
long optimize(CapacityExtractorOutputStream out, long now, AtomicLong allocation) {
int currentChunk = chunk(now);
long reclaimed = 0L;
synchronized(this.chunks) {
for (int i = 0; i < this.chunks.length; i++) {
if (null == this.chunks[i] || i == currentChunk) {
continue;
}
int size = this.chunks[i].size();
try {
this.chunks[i].writeTo(out);
int capacity = out.getCapacity();
if (capacity > size) {
this.chunks[i].resize(size);
allocation.addAndGet(size);
reclaimed += (capacity - size);
}
} catch (IOException ioe) {
}
}
}
return reclaimed;
}
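  // Sketch of the intent above: writing a non-current chunk through the
  // CapacityExtractorOutputStream exposes the capacity of the encoder's backing buffer;
  // when that capacity exceeds the encoder's logical size, resize(size) shrinks the buffer
  // and the difference (capacity - size) is counted as reclaimed memory.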
}
| warp10/src/main/java/io/warp10/standalone/InMemoryChunkSet.java | //
// Copyright 2017 Cityzen Data
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.standalone;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import io.warp10.continuum.TimeSource;
import io.warp10.continuum.gts.GTSDecoder;
import io.warp10.continuum.gts.GTSEncoder;
import io.warp10.continuum.sensision.SensisionConstants;
import io.warp10.sensision.Sensision;
public class InMemoryChunkSet {
  // Keep track of whether or not a GTSEncoder has all its timestamps in chronological order; this speeds up fetching
//
private final GTSEncoder[] chunks;
/**
* End timestamp of each chunk
*/
private final long[] chunkends;
/**
* Flags indicating if timestamps are increasingly monotonic
*/
private final BitSet chronological;
/**
* Last timestamp encountered in a chunk
*/
private final long[] lasttimestamp;
/**
* Length of chunks in time units
*/
private final long chunklen;
/**
* Number of chunks
*/
private final int chunkcount;
public InMemoryChunkSet(int chunkcount, long chunklen) {
this.chunks = new GTSEncoder[chunkcount];
this.chunkends = new long[chunkcount];
this.chronological = new BitSet(chunkcount);
this.lasttimestamp = new long[chunkcount];
this.chunklen = chunklen;
this.chunkcount = chunkcount;
}
/**
* Store the content of a GTSEncoder in the various chunks we manage
*
* @param encoder The GTSEncoder instance to store
*/
public void store(GTSEncoder encoder) throws IOException {
// Get the current time
long now = TimeSource.getTime();
long lastChunkEnd = chunkEnd(now);
long firstChunkStart = lastChunkEnd - (chunkcount * chunklen) + 1;
// Get a decoder without copying the encoder array
GTSDecoder decoder = encoder.getUnsafeDecoder(false);
int lastchunk = -1;
GTSEncoder chunkEncoder = null;
while(decoder.next()) {
long timestamp = decoder.getTimestamp();
// Ignore timestamp if it is not in the valid range
if (timestamp < firstChunkStart || timestamp > lastChunkEnd) {
continue;
}
// Compute the chunkid
int chunkid = chunk(timestamp);
if (chunkid != lastchunk) {
chunkEncoder = null;
synchronized(this.chunks) {
// Is the chunk non existent or has expired?
if (null == this.chunks[chunkid] || this.chunkends[chunkid] < firstChunkStart) {
long end = chunkEnd(timestamp);
this.chunks[chunkid] = new GTSEncoder(0L);
this.lasttimestamp[chunkid] = end - this.chunklen;
this.chronological.set(chunkid);
this.chunkends[chunkid] = end;
}
chunkEncoder = this.chunks[chunkid];
if (timestamp < this.lasttimestamp[chunkid]) {
this.chronological.set(chunkid, false);
}
this.lasttimestamp[chunkid] = timestamp;
}
lastchunk = chunkid;
}
chunkEncoder.addValue(timestamp, decoder.getLocation(), decoder.getElevation(), decoder.getValue());
}
}
/**
* Compute the chunk id given a timestamp.
* @param timestamp
* @return
*/
private int chunk(long timestamp) {
int chunkid;
if (timestamp >= 0) {
chunkid = (int) ((timestamp / chunklen) % chunkcount);
} else {
chunkid = chunkcount + (int) ((((timestamp + 1) / chunklen) % chunkcount) - 1);
//chunkid = chunkcount - (int) ((- (timestamp + 1) / chunklen) % chunkcount);
}
return chunkid;
}
/**
* Compute the end timestamp of the chunk this timestamp
* belongs to.
*
* @param timestamp
* @return
*/
private long chunkEnd(long timestamp) {
long end;
if (timestamp > 0) {
end = ((timestamp / chunklen) * chunklen) + chunklen - 1;
} else {
end = ((((timestamp + 1) / chunklen) - 1) * chunklen) + chunklen - 1;
}
return end;
}
/**
* Fetches some data from this chunk set
*
* @param now The end timestamp to consider (inclusive).
* @param timespan The timespan or value count to consider.
* @return
*/
public GTSDecoder fetch(long now, long timespan) throws IOException {
return fetchEncoder(now, timespan).getUnsafeDecoder(false);
}
public List<GTSDecoder> getDecoders() {
List<GTSDecoder> decoders = new ArrayList<GTSDecoder>();
synchronized (this.chunks) {
      for (int i = 0; i < this.chunks.length; i++) {
if (null == this.chunks[i]) {
continue;
}
decoders.add(this.chunks[i].getUnsafeDecoder(false));
}
}
return decoders;
}
public GTSEncoder fetchEncoder(long now, long timespan) throws IOException {
// Clean up first
clean(TimeSource.getTime());
if (timespan < 0) {
return fetchCountEncoder(now, -timespan);
}
//
// Determine the chunk id of 'now'
// We offset it by chunkcount so we can safely decrement and
// still have a positive remainder when doing a modulus
//
int nowchunk = chunk(now) + this.chunkcount;
// Compute the first timestamp (included)
long firstTimestamp = now - timespan + 1;
GTSEncoder encoder = new GTSEncoder(0L);
for (int i = 0; i < this.chunkcount; i++) {
int chunk = (nowchunk - i) % this.chunkcount;
GTSDecoder chunkDecoder = null;
synchronized(this.chunks) {
// Ignore a given chunk if it does not intersect our current range
if (this.chunkends[chunk] < firstTimestamp || (this.chunkends[chunk] - this.chunklen) >= now) {
continue;
}
// Extract a decoder to scan the chunk
if (null != this.chunks[chunk]) {
chunkDecoder = this.chunks[chunk].getUnsafeDecoder(false);
}
}
if (null == chunkDecoder) {
continue;
}
// Merge the data from chunkDecoder that lies within the requested range into 'encoder'
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now || ts < firstTimestamp) {
continue;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
}
}
return encoder;
}
private GTSDecoder fetchCount(long now, long count) throws IOException {
return fetchCountEncoder(now, count).getUnsafeDecoder(false);
}
private GTSEncoder fetchCountEncoder(long now, long count) throws IOException {
//
// Determine the chunk id of 'now'
// We offset it by chunkcount so we can safely decrement and
// still have a positive remainder when doing a modulus
//
int nowchunk = chunk(now) + this.chunkcount;
GTSEncoder encoder = new GTSEncoder();
// Initialize the number of datapoints to fetch
long nvalues = count;
// Loop over the chunks
for (int i = 0; i < this.chunkcount; i++) {
int chunk = (nowchunk - i) % this.chunkcount;
GTSDecoder chunkDecoder = null;
boolean inorder = true;
long chunkEnd = -1;
synchronized(this.chunks) {
// Ignore a given chunk if it is after 'now'
if (this.chunkends[chunk] - this.chunklen >= now) {
continue;
}
// Extract a decoder to scan the chunk
if (null != this.chunks[chunk]) {
chunkDecoder = this.chunks[chunk].getUnsafeDecoder(false);
inorder = this.chronological.get(chunk);
chunkEnd = this.chunkends[chunk];
}
}
if (null == chunkDecoder) {
continue;
}
// We now have a chunk; we will treat it differently depending on
// whether it is in chronological order or not
if (inorder) {
//
// If the end timestamp of the chunk is before 'now' and the
// chunk contains less than the remaining values we need to fetch
// we can add everything.
//
if (chunkEnd <= now && chunkDecoder.getCount() <= nvalues) {
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else if (chunkDecoder.getCount() <= nvalues) {
// We have a chunk with chunkEnd > 'now' but which contains fewer than nvalues values,
// so we add all the values whose timestamp is <= 'now'
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
// we can break because we know the encoder is in chronological order.
break;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
//
// The chunk has more values than what we need.
// If the end of the chunk is <= now then we know we must skip the chunk's first (getCount() - nvalues) values and
// add the rest to the result.
// Otherwise it's a little trickier
//
if (chunkEnd <= now) {
long skip = chunkDecoder.getCount() - nvalues;
while(skip > 0 && chunkDecoder.next()) {
skip--;
}
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
// We will transfer the datapoints whose timestamp is <= now into an intermediate encoder
GTSEncoder intenc = new GTSEncoder();
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
// we can break because we know the encoder is in chronological order.
break;
}
intenc.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
// Then transfer the intermediate encoder to the result
chunkDecoder = intenc.getUnsafeDecoder(false);
long skip = chunkDecoder.getCount() - nvalues;
while(skip > 0 && chunkDecoder.next()) {
skip--;
}
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
}
}
} else {
// The chunk decoder is not in chronological order...
// If the chunk decoder end is <= 'now' and the decoder contains less values than
// what is still needed, add everything.
if (chunkEnd <= now && chunkDecoder.getCount() <= nvalues) {
while(chunkDecoder.next()) {
encoder.addValue(chunkDecoder.getTimestamp(), chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else if(chunkDecoder.getCount() <= nvalues) {
// We have a chunk with chunkEnd > 'now' but which contains fewer than nvalues values,
// so we add all the values whose timestamp is <= 'now'
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
// we skip the value as the encoder is not in chronological order
continue;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
} else {
// We have a chunk which has more values than what we need and/or whose end
// is after 'now'
// We will transfer the datapoints whose timestamp is <= now into an intermediate encoder
GTSEncoder intenc = new GTSEncoder();
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts > now) {
continue;
}
intenc.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
}
//
// Now we need to extract the ticks of the intermediary encoder
//
chunkDecoder = intenc.getUnsafeDecoder(false);
long[] ticks = new long[(int) chunkDecoder.getCount()];
int k = 0;
while(chunkDecoder.next()) {
ticks[k++] = chunkDecoder.getTimestamp();
}
// Now sort the ticks
Arrays.sort(ticks);
// We must skip values whose timestamp is below ticks[ticks.length - nvalues]
long skipbelow = ticks[ticks.length - (int) nvalues];
// Then transfer the intermediate encoder to the result
chunkDecoder = intenc.getUnsafeDecoder(false);
while(chunkDecoder.next()) {
long ts = chunkDecoder.getTimestamp();
if (ts < skipbelow) {
continue;
}
encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
nvalues--;
}
}
}
}
return encoder;
}
/**
* Compute the total number of datapoints stored in this chunk set.
*
* @return The total number of datapoints across all chunks.
*/
public long getCount() {
long count = 0L;
for (GTSEncoder encoder: chunks) {
if (null != encoder) {
count += encoder.getCount();
}
}
return count;
}
/**
* Compute the total size occupied by the encoders in this chunk set
*
* @return The cumulative size of the encoders currently held in this chunk set.
*/
public long getSize() {
long size = 0L;
for (GTSEncoder encoder: chunks) {
if (null != encoder) {
size += encoder.size();
}
}
return size;
}
/**
* Clean expired chunks according to 'now'
*
* @param now Current time, used to determine which chunks have expired.
* @return The number of datapoints dropped from expired chunks.
*/
public long clean(long now) {
long cutoff = chunkEnd(now) - this.chunkcount * this.chunklen;
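// Illustrative example: with chunklen = 1000, chunkcount = 4 and now = 5500, cutoff = 5999 - 4000 = 1999,
// so only the chunks ending at 2999, 3999, 4999 and 5999 are kept.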
int dropped = 0;
long droppedDatapoints = 0L;
synchronized(this.chunks) {
for (int i = 0; i < this.chunks.length; i++) {
if (null == this.chunks[i]) {
continue;
}
if (this.chunkends[i] <= cutoff) {
droppedDatapoints += this.chunks[i].getCount();
this.chunks[i] = null;
dropped++;
}
}
}
Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STANDALONE_INMEMORY_GC_CHUNKS, Sensision.EMPTY_LABELS, dropped);
return droppedDatapoints;
}
/**
* Optimize all non-current chunks by shrinking their buffers.
*
* @param out Stream used to probe the current capacity of each chunk's buffer.
* @param now Current time, used to identify (and skip) the chunk currently being written.
* @param allocation Counter incremented by the size of each buffer re-allocated while shrinking.
* @return The number of bytes reclaimed by shrinking.
*/
long optimize(CapacityExtractorOutputStream out, long now, AtomicLong allocation) {
int currentChunk = chunk(now);
long reclaimed = 0L;
synchronized(this.chunks) {
for (int i = 0; i < this.chunks.length; i++) {
if (null == this.chunks[i] || i == currentChunk) {
continue;
}
int size = this.chunks[i].size();
try {
this.chunks[i].writeTo(out);
int capacity = out.getCapacity();
if (capacity > size) {
this.chunks[i].resize(size);
allocation.addAndGet(size);
reclaimed += (capacity - size);
}
} catch (IOException ioe) {
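// Exception ignored: the chunk is simply left at its current capacity.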
}
}
}
return reclaimed;
}
}
| Corrected case when the intermediary decoder contains less than the number of requested values.
| warp10/src/main/java/io/warp10/standalone/InMemoryChunkSet.java | Corrected case when the intermediary decoder contains less than the number of requested values. | <ide><path>arp10/src/main/java/io/warp10/standalone/InMemoryChunkSet.java
<ide> // Now sort the ticks
<ide> Arrays.sort(ticks);
<ide> // We must skip values whose timestamp is <= ticks[ticks.length - nvalues]
<del> long skipbelow = ticks[ticks.length - (int) nvalues];
<ide>
<del> // Then transfer the intermediate encoder to the result
<del> chunkDecoder = intenc.getUnsafeDecoder(false);
<del> while(chunkDecoder.next()) {
<del> long ts = chunkDecoder.getTimestamp();
<del> if (ts < skipbelow) {
<del> continue;
<del> }
<del> encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
<del> nvalues--;
<del> }
<add> if (ticks.length > nvalues) {
<add> long skipbelow = ticks[ticks.length - (int) nvalues];
<add>
<add> // Then transfer the intermediate encoder to the result
<add> chunkDecoder = intenc.getUnsafeDecoder(false);
<add> while(chunkDecoder.next() && nvalues > 0) {
<add> long ts = chunkDecoder.getTimestamp();
<add> if (ts < skipbelow) {
<add> continue;
<add> }
<add> encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
<add> nvalues--;
<add> }
<add> } else {
<add> // The intermediary decoder has less than nvalues whose ts is <= now, transfer everything
<add> chunkDecoder = intenc.getUnsafeDecoder(false);
<add> while(chunkDecoder.next()) {
<add> long ts = chunkDecoder.getTimestamp();
<add> encoder.addValue(ts, chunkDecoder.getLocation(), chunkDecoder.getElevation(), chunkDecoder.getValue());
<add> nvalues--;
<add> }
<add> }
<ide> }
<ide> }
<ide> }
<add>
<ide> return encoder;
<ide> }
<ide> |